hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ffacc420912dd26001ac277b04f7f85b2a74225c | 1,239 | exs | Elixir | refuge/config/prod.secret.exs | herminiotorres/pragmaticstudio | 273647694519fd4149716abf190eb8d97102f488 | [
"MIT"
] | null | null | null | refuge/config/prod.secret.exs | herminiotorres/pragmaticstudio | 273647694519fd4149716abf190eb8d97102f488 | [
"MIT"
] | 1 | 2020-02-26T14:55:23.000Z | 2020-02-26T14:55:23.000Z | daniel/pragstudio/refuge/config/prod.secret.exs | jdashton/glowing-succotash | 44580c2d4cb300e33156d42e358e8a055948a079 | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — confirm the project's minimum Elixir version before changing.
use Mix.Config
# Fail fast at boot: `nil || raise` aborts with a clear message when the
# required environment variable is unset.
database_url =
  System.get_env("DATABASE_URL") ||
    raise """
    environment variable DATABASE_URL is missing.
    For example: ecto://USER:PASS@HOST/DATABASE
    """
config :refuge, Refuge.Repo,
  # ssl: true,
  url: database_url,
  # POOL_SIZE must hold an integer string; defaults to 10 connections.
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
# SECRET_KEY_BASE signs/encrypts sessions and cookies — never hardcode it.
secret_key_base =
  System.get_env("SECRET_KEY_BASE") ||
    raise """
    environment variable SECRET_KEY_BASE is missing.
    You can generate one by calling: mix phx.gen.secret
    """
config :refuge, RefugeWeb.Endpoint,
  http: [
    # PORT must hold an integer string; defaults to 4000.
    port: String.to_integer(System.get_env("PORT") || "4000"),
    transport_options: [socket_opts: [:inet6]]
  ],
  secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :refuge, RefugeWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 29.5 | 67 | 0.717514 |
ffad0626c55bf1a8ab03b5d18cf46f35f87f7c83 | 6,604 | ex | Elixir | lib/tesla.ex | RamanBut-Husaim/tesla | d618115c27950cac8e7c1b4ee305d7df575b2383 | [
"MIT"
] | 1 | 2021-04-19T06:48:59.000Z | 2021-04-19T06:48:59.000Z | lib/tesla.ex | sdn90/tesla | 4c59ef6ca7dac632a474c054f02c3323504bd03c | [
"MIT"
] | null | null | null | lib/tesla.ex | sdn90/tesla | 4c59ef6ca7dac632a474c054f02c3323504bd03c | [
"MIT"
] | null | null | null | defmodule Tesla.Error do
defexception env: nil, stack: [], reason: nil
def message(%Tesla.Error{env: %{url: url, method: method}, reason: reason}) do
"#{inspect(reason)} (#{method |> to_string |> String.upcase()} #{url})"
end
end
defmodule Tesla.Env do
  # Container for one request/response cycle. Middlewares receive an
  # %Tesla.Env{} plus the stack of remaining steps and transform it
  # (see Tesla.run/2 below in this file).

  # The client struct the request was built with.
  @type client :: Tesla.Client.t()
  @type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch
  @type url :: binary
  # Query params may nest: a key can map to a list of {key, param} pairs.
  @type param :: binary | [{binary | atom, param}]
  @type query :: [{binary | atom, param}]
  # Headers are a flat pair list, so duplicate keys are representable.
  @type headers :: [{binary, binary}]
  @type body :: any
  @type status :: integer
  @type opts :: [any]
  # One stack entry: {mod, fun, args}, {mod, fun}, or a :fn-tagged anonymous
  # function (adapter arity t -> t, or middleware arity t, stack -> t).
  @type stack :: [{atom, atom, any} | {atom, atom} | {:fn, (t -> t)} | {:fn, (t, stack -> t)}]
  @type result :: {:ok, t()} | {:error, any}
  @type t :: %__MODULE__{
          method: method,
          query: query,
          url: url,
          headers: headers,
          body: body,
          status: status,
          opts: opts,
          __module__: atom,
          __client__: client
        }

  # __module__/__client__ record which API module and client produced the
  # request — internal bookkeeping, not part of the HTTP exchange itself.
  defstruct method: nil,
            url: "",
            query: [],
            headers: [],
            body: nil,
            status: nil,
            opts: [],
            __module__: nil,
            __client__: nil
end
defmodule Tesla.Client do
  # Per-client middleware stacks: `pre` runs before the API module's compiled
  # middleware, `post` runs after it (see Tesla.prepare/3 in this file).
  @type t :: %__MODULE__{
          pre: Tesla.Env.stack(),
          post: Tesla.Env.stack()
        }

  # NOTE(review): `fun` is not covered by @type t above — presumably a legacy
  # field; confirm whether anything still sets it before removing.
  defstruct fun: nil,
            pre: [],
            post: []
end
defmodule Tesla.Middleware do
  # Behaviour for middlewares: receive the env, the remaining stack (`next`)
  # and compile-time options; implementations typically pass control onward
  # via Tesla.run(env, next) and return its result.
  @callback call(env :: Tesla.Env.t(), next :: Tesla.Env.stack(), options :: any) ::
              Tesla.Env.result()
end
defmodule Tesla.Adapter do
  # Contract every HTTP adapter implements: take the env plus adapter options
  # and produce {:ok, env} | {:error, reason}.
  @callback call(env :: Tesla.Env.t(), options :: any) :: Tesla.Env.result()

  # Builds the effective adapter option list. Later sources win on key
  # conflicts: compiled-in `defaults` < `opts` given to the adapter <
  # per-request `env.opts[:adapter]`.
  def opts(defaults \\ [], env, opts) do
    [opts || [], env.opts[:adapter] || []]
    |> Enum.reduce(defaults, fn overrides, acc -> Keyword.merge(acc, overrides) end)
  end
end
defmodule Tesla do
  use Tesla.Builder
  alias Tesla.Env
  require Tesla.Adapter.Httpc
  # Compiled-in fallback adapter, used only when nothing else is configured.
  @default_adapter Tesla.Adapter.Httpc
  @moduledoc """
  A HTTP toolkit for building API clients using middlewares
  Include Tesla module in your api client:
  ```
  defmodule ExampleApi do
    use Tesla
    plug Tesla.Middleware.BaseUrl, "http://api.example.com"
    plug Tesla.Middleware.JSON
  end
  ```
  """
  # Delegates to Tesla.Builder, which provides `plug`/`adapter` and the
  # generated request helpers in the using module.
  defmacro __using__(opts \\ []) do
    quote do
      use Tesla.Builder, unquote(opts)
    end
  end
  # Entry point used by generated API functions: build env + stack, run them.
  @doc false
  def execute(module, client, options) do
    {env, stack} = prepare(module, client, options)
    run(env, stack)
  end
  # Bang variant: unwraps {:ok, env}, raises Tesla.Error on {:error, _}.
  @doc false
  def execute!(module, client, options) do
    {env, stack} = prepare(module, client, options)
    case run(env, stack) do
      {:ok, env} -> env
      {:error, error} -> raise Tesla.Error, env: env, stack: stack, reason: error
    end
  end
  # Builds the initial %Env{} and the full execution stack:
  # client.pre ++ module middleware ++ client.post ++ [adapter] (adapter last).
  defp prepare(module, %{pre: pre, post: post} = client, options) do
    env = struct(Env, options ++ [__module__: module, __client__: client])
    stack = pre ++ module.__middleware__ ++ post ++ [effective_adapter(module)]
    {env, stack}
  end
  # Adapter resolution — first non-nil wins: per-module app config, the
  # module's own adapter declaration, global app config, then the default.
  @doc false
  def effective_adapter(module) do
    with nil <- adapter_per_module_from_config(module),
         nil <- adapter_per_module(module),
         nil <- adapter_from_config() do
      adapter_default()
    end
  end
  defp adapter_per_module_from_config(module) do
    # Config accepts either `adapter: Module` or `adapter: {Module, opts}`.
    case Application.get_env(:tesla, module, [])[:adapter] do
      nil -> nil
      {adapter, opts} -> {adapter, :call, [opts]}
      adapter -> {adapter, :call, [[]]}
    end
  end
  defp adapter_per_module(module) do
    module.__adapter__
  end
  defp adapter_from_config do
    case Application.get_env(:tesla, :adapter) do
      nil -> nil
      {adapter, opts} -> {adapter, :call, [opts]}
      adapter -> {adapter, :call, [[]]}
    end
  end
  defp adapter_default do
    {@default_adapter, :call, [[]]}
  end
  def run_default_adapter(env, opts \\ []) do
    apply(@default_adapter, :call, [env, opts])
  end
  # empty stack case is useful for reusing/testing middlewares (just pass [] as next)
  def run(env, []), do: {:ok, env}
  # last item in stack is adapter - skip passing rest of stack
  def run(env, [{:fn, f}]), do: apply(f, [env])
  def run(env, [{m, f, a}]), do: apply(m, f, [env | a])
  # for all other elements pass (env, next, opts)
  def run(env, [{:fn, f} | rest]), do: apply(f, [env, rest])
  def run(env, [{m, f, a} | rest]), do: apply(m, f, [env, rest | a])
  # useful helper functions
  # Sets `key` in env.opts, overwriting any existing value.
  def put_opt(env, key, value) do
    Map.update!(env, :opts, &Keyword.put(&1, key, value))
  end
  # Returns the first header value for `key`, or nil when absent.
  @spec get_header(Env.t(), binary) :: binary | nil
  def get_header(%Env{headers: headers}, key) do
    case List.keyfind(headers, key, 0) do
      {_, value} -> value
      _ -> nil
    end
  end
  # Returns all values for `key` (headers may repeat).
  @spec get_headers(Env.t(), binary) :: [binary]
  def get_headers(%Env{headers: headers}, key) do
    for {k, v} <- headers, k == key, do: v
  end
  # Replaces the first occurrence of `key`, or appends when absent.
  @spec put_header(Env.t(), binary, binary) :: Env.t()
  def put_header(%Env{} = env, key, value) do
    headers = List.keystore(env.headers, key, 0, {key, value})
    %{env | headers: headers}
  end
  # Appends `list` as-is; does not de-duplicate existing keys.
  @spec put_headers(Env.t(), [{binary, binary}]) :: Env.t()
  def put_headers(%Env{} = env, list) when is_list(list) do
    %{env | headers: env.headers ++ list}
  end
  # Removes every occurrence of `key`.
  @spec delete_header(Env.t(), binary) :: Env.t()
  def delete_header(%Env{} = env, key) do
    headers = for {k, v} <- env.headers, k != key, do: {k, v}
    %{env | headers: headers}
  end
  @spec put_body(Env.t(), Env.body()) :: Env.t()
  def put_body(%Env{} = env, body), do: %{env | body: body}
  @doc """
  Dynamically build client from list of middlewares.
  ```
  defmodule ExampleAPI do
    use Tesla
    def new(token) do
      Tesla.build_client([
        {Tesla.Middleware.Headers, [{"authorization", token}]}
      ])
    end
  end
  client = ExampleAPI.new(token: "abc")
  client |> ExampleAPI.get("/me")
  ```
  """
  def build_client(pre, post \\ []), do: Tesla.Builder.client(pre, post)
  # Wraps a plain fun as the adapter stage of a dynamically built client.
  def build_adapter(fun), do: Tesla.Builder.client([], [fn env, _next -> fun.(env) end])
  def build_url(url, []), do: url
  # Appends the encoded query, using "&" when the url already has a "?".
  def build_url(url, query) do
    join = if String.contains?(url, "?"), do: "&", else: "?"
    url <> join <> encode_query(query)
  end
  defp encode_query(query) do
    query
    |> Enum.flat_map(&encode_pair/1)
    |> URI.encode_query()
  end
  # Flattens nested params: keyword values become "key[sub]" pairs,
  # plain lists become repeated "key[]" pairs.
  @doc false
  def encode_pair({key, value}) when is_list(value) do
    if Keyword.keyword?(value) do
      Enum.flat_map(value, fn {k, v} -> encode_pair({"#{key}[#{k}]", v}) end)
    else
      Enum.map(value, fn e -> {"#{key}[]", e} end)
    end
  end
  @doc false
  def encode_pair({key, value}), do: [{key, value}]
end
| 25.898039 | 94 | 0.592217 |
ffad427c9d3c5bd07af07fd69208e205fef70779 | 537 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/model/dog.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/dog.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | 1 | 2022-01-06T22:28:02.000Z | 2022-01-06T22:28:02.000Z | samples/client/petstore/elixir/lib/swagger_petstore/model/dog.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule SwaggerPetstore.Model.Dog do
  @moduledoc """
  Generated model struct for the Petstore `Dog` schema.
  """
  # Derive Poison.Encoder so the struct serializes to JSON field-for-field.
  @derive [Poison.Encoder]
  defstruct [
    :className,
    :color,
    :breed
  ]
  @type t :: %__MODULE__{
    :className => String.t,
    :color => String.t,
    :breed => String.t
  }
end
defimpl Poison.Decoder, for: SwaggerPetstore.Model.Dog do
  # Identity pass-through: the struct Poison builds needs no post-processing,
  # so decoding returns it unchanged.
  def decode(value, _options), do: value
end
| 17.9 | 75 | 0.664804 |
ffad444c12d94ffb82aaa2ef0eba950fbeadd2f2 | 65 | ex | Elixir | todoList/lib/todoList_web/views/layout_view.ex | Mateusz-Stempniewicz/elixir-restservice | 2a8f1e7950fd33b202174ea1b2cb2ba0f1620416 | [
"MIT"
] | null | null | null | todoList/lib/todoList_web/views/layout_view.ex | Mateusz-Stempniewicz/elixir-restservice | 2a8f1e7950fd33b202174ea1b2cb2ba0f1620416 | [
"MIT"
] | null | null | null | todoList/lib/todoList_web/views/layout_view.ex | Mateusz-Stempniewicz/elixir-restservice | 2a8f1e7950fd33b202174ea1b2cb2ba0f1620416 | [
"MIT"
] | null | null | null | defmodule TodoListWeb.LayoutView do
use TodoListWeb, :view
end
| 16.25 | 35 | 0.815385 |
ffad55277c3cb16ef7c5470cb6cc1e0a4c8b9308 | 1,890 | exs | Elixir | phoenix_backend/test/phoenix_backend/content_test.exs | raphaklaus/opentelemetry-examples | 099fc6cf52bdd337f359530e12007050aa835b35 | [
"Apache-2.0"
] | 4 | 2020-03-15T10:51:31.000Z | 2020-04-24T07:07:01.000Z | phoenix_backend/test/phoenix_backend/content_test.exs | raphaklaus/opentelemetry-examples | 099fc6cf52bdd337f359530e12007050aa835b35 | [
"Apache-2.0"
] | 8 | 2020-06-27T22:05:24.000Z | 2022-02-16T09:23:41.000Z | phoenix_backend/test/phoenix_backend/content_test.exs | raphaklaus/opentelemetry-examples | 099fc6cf52bdd337f359530e12007050aa835b35 | [
"Apache-2.0"
] | 5 | 2020-10-27T00:59:26.000Z | 2021-11-23T17:22:23.000Z | defmodule PhoenixBackend.ContentTest do
use PhoenixBackend.DataCase
alias PhoenixBackend.Content
describe "posts" do
alias PhoenixBackend.Content.Post
@valid_attrs %{title: "some title"}
@update_attrs %{title: "some updated title"}
@invalid_attrs %{title: nil}
def post_fixture(attrs \\ %{}) do
{:ok, post} =
attrs
|> Enum.into(@valid_attrs)
|> Content.create_post()
post
end
test "list_posts/0 returns all posts" do
post = post_fixture()
assert Content.list_posts() == [post]
end
test "get_post!/1 returns the post with given id" do
post = post_fixture()
assert Content.get_post!(post.id) == post
end
test "create_post/1 with valid data creates a post" do
assert {:ok, %Post{} = post} = Content.create_post(@valid_attrs)
assert post.title == "some title"
end
test "create_post/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Content.create_post(@invalid_attrs)
end
test "update_post/2 with valid data updates the post" do
post = post_fixture()
assert {:ok, %Post{} = post} = Content.update_post(post, @update_attrs)
assert post.title == "some updated title"
end
test "update_post/2 with invalid data returns error changeset" do
post = post_fixture()
assert {:error, %Ecto.Changeset{}} = Content.update_post(post, @invalid_attrs)
assert post == Content.get_post!(post.id)
end
test "delete_post/1 deletes the post" do
post = post_fixture()
assert {:ok, %Post{}} = Content.delete_post(post)
assert_raise Ecto.NoResultsError, fn -> Content.get_post!(post.id) end
end
test "change_post/1 returns a post changeset" do
post = post_fixture()
assert %Ecto.Changeset{} = Content.change_post(post)
end
end
end
| 29.076923 | 84 | 0.654497 |
ffadad78b550f5b153a5dbbe8032757f767f9fe4 | 920 | exs | Elixir | config/prod.exs | yknx4/opencov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 8 | 2021-08-22T10:37:57.000Z | 2022-01-10T11:27:06.000Z | config/prod.exs | yknx4/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | 109 | 2021-08-20T04:08:04.000Z | 2022-01-03T07:39:18.000Z | config/prod.exs | Librecov/librecov | dc961a41e29b41b0657bc2a64bb67350a65477b8 | [
"MIT"
] | null | null | null | import Config
config :tesla, Tesla.Middleware.Logger, filter_headers: ["authorization"]
config :librecov, Librecov.Endpoint, cache_static_manifest: "priv/static/cache_manifest.json"
config :librecov, Librecov.Repo,
adapter: Ecto.Adapters.Postgres,
ssl: true
config :logger, :console, metadata: [:request_id, :mfa]
config :logger, level: :info
config Librecov.Plug.Github,
path: "/api/v1/github_webhook",
action: {Librecov.GithubService, :handle}
config :sentry,
enable_source_code_context: true,
root_source_code_path: File.cwd!(),
tags: %{
env: "production"
},
included_environments: ["prod", "staging"]
config :logger, Sentry.LoggerBackend,
level: :error,
excluded_domains: [],
capture_log_messages: true
config :event_bus,
error_handler: {Librecov.Helpers.SentryErrorLogger, :log}
if File.exists?(Path.join(__DIR__, "prod.secret.exs")) do
import_config "prod.secret.exs"
end
| 24.210526 | 93 | 0.744565 |
ffadbf92af5c9a76f97e4b79b0773fb5544930b2 | 1,210 | exs | Elixir | mix.exs | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | 1 | 2018-06-28T11:15:14.000Z | 2018-06-28T11:15:14.000Z | mix.exs | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | null | null | null | mix.exs | Ninigi/ecto_dripper | ca472e90aafd3313b91a268f81e6aa936c59e021 | [
"MIT"
] | null | null | null | defmodule EctoDripper.MixProject do
use Mix.Project
def project do
[
app: :ecto_dripper,
version: "1.0.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ecto_sql, "~> 3.0"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp description() do
"""
A simple way to create Ecto Queries.
Clean up and declutter your query modules by only writing out the necessary code - or no code at all.
"""
end
defp package() do
[
name: "ecto_dripper",
files: ["lib", "mix.exs", "README.md", "LICENSE.md", ".formatter.exs"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/ninigi/ecto_dripper"},
maintainers: ["Fabian Zitter"]
]
end
end
| 23.269231 | 105 | 0.590083 |
ffadc8d7db56ef2590edd62fd2f68a58169a0a2c | 5,851 | exs | Elixir | apps/reaper/test/unit/reaper/data_extract/schema_filler_test.exs | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | 1 | 2021-04-05T19:17:18.000Z | 2021-04-05T19:17:18.000Z | apps/reaper/test/unit/reaper/data_extract/schema_filler_test.exs | PillarTechnology/smartcitiesdata | 9420a26820e38267513cd1bfa82c7f5583222bb1 | [
"Apache-2.0"
] | 11 | 2020-01-07T15:43:42.000Z | 2020-12-22T15:23:25.000Z | apps/reaper/test/unit/reaper/data_extract/schema_filler_test.exs | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | null | null | null | defmodule Reaper.DataExtract.SchemaFillerTest do
use ExUnit.Case
doctest SmartCity.Helpers
alias Reaper.DataExtract.SchemaFiller
describe "single level" do
setup do
basic_schema = [
%{name: "id", type: "string"},
%{
name: "parentMap",
type: "map",
subSchema: [%{name: "fieldA", type: "string"}, %{name: "fieldB", type: "string"}]
}
]
list_schema = [
%{name: "id", type: "string"},
%{
name: "parentList",
type: "list",
itemType: "string"
}
]
[
basic_schema: basic_schema,
list_schema: list_schema
]
end
test "nil map", %{basic_schema: schema} do
payload = %{"id" => "id", "parentMap" => nil}
expected = %{
"id" => "id",
"parentMap" => %{"fieldA" => nil, "fieldB" => nil}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "empty map", %{basic_schema: schema} do
payload = %{"id" => "id", "parentMap" => %{}}
expected = %{
"id" => "id",
"parentMap" => %{"fieldA" => nil, "fieldB" => nil}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "partial map", %{basic_schema: schema} do
payload = %{"id" => "id", "parentMap" => %{"fieldA" => "fieldA"}}
expected = %{
"id" => "id",
"parentMap" => %{"fieldA" => "fieldA", "fieldB" => nil}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "empty list", %{list_schema: schema} do
payload = %{"id" => "id", "parentList" => []}
expected = %{
"id" => "id",
"parentList" => []
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "list with string item", %{list_schema: schema} do
payload = %{"id" => "id", "parentList" => ["thing"]}
expected = %{
"id" => "id",
"parentList" => ["thing"]
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
end
describe "two levels" do
setup do
two_level_list_schema = [
%{name: "id", type: "string"},
%{
name: "parentList",
type: "list",
itemType: "map",
subSchema: [%{name: "fieldA", type: "string"}, %{name: "fieldB", type: "string"}]
}
]
nested_maps_schema = [
%{name: "id", type: "string"},
%{
name: "grandParent",
type: "map",
subSchema: [
%{
name: "parentMap",
type: "map",
subSchema: [%{name: "fieldA", type: "string"}, %{name: "fieldB", type: "string"}]
}
]
}
]
[
two_level_list_schema: two_level_list_schema,
nested_maps_schema: nested_maps_schema
]
end
test "list with empty map", %{two_level_list_schema: schema} do
payload = %{"id" => "id", "parentList" => [%{}]}
expected = %{
"id" => "id",
"parentList" => []
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "list with nil", %{two_level_list_schema: schema} do
payload = %{"id" => "id", "parentList" => [nil]}
expected = %{
"id" => "id",
"parentList" => []
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "list with one good value and two ignored values", %{two_level_list_schema: schema} do
payload = %{"id" => "id", "parentList" => [%{}, %{"fieldA" => "child"}, nil]}
expected = %{
"id" => "id",
"parentList" => [%{"fieldA" => "child", "fieldB" => nil}]
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "list with partial map", %{two_level_list_schema: schema} do
payload = %{"id" => "id", "parentList" => [%{"fieldA" => "fieldA"}]}
expected = %{
"id" => "id",
"parentList" => [%{"fieldA" => "fieldA", "fieldB" => nil}]
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "list with 2 partial maps", %{two_level_list_schema: schema} do
payload = %{"id" => "id", "parentList" => [%{"fieldA" => "fieldA"}, %{"fieldB" => "fieldB"}]}
expected = %{
"id" => "id",
"parentList" => [%{"fieldA" => "fieldA", "fieldB" => nil}, %{"fieldA" => nil, "fieldB" => "fieldB"}]
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "empty map grandparent", %{nested_maps_schema: schema} do
payload = %{"id" => "id", "grandParent" => %{}}
expected = %{
"id" => "id",
"grandParent" => %{"parentMap" => %{"fieldA" => nil, "fieldB" => nil}}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "map with empty map", %{nested_maps_schema: schema} do
payload = %{"id" => "id", "grandParent" => %{"parentMap" => %{}}}
expected = %{
"id" => "id",
"grandParent" => %{"parentMap" => %{"fieldA" => nil, "fieldB" => nil}}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
test "map with partial map", %{nested_maps_schema: schema} do
payload = %{"id" => "id", "grandParent" => %{"parentMap" => %{"fieldA" => "fieldA"}}}
expected = %{
"id" => "id",
"grandParent" => %{"parentMap" => %{"fieldA" => "fieldA", "fieldB" => nil}}
}
actual = SchemaFiller.fill(schema, payload)
assert expected == actual
end
end
end
| 25.550218 | 108 | 0.503162 |
ffadee78917713679dab1e46177b7ab85d19a7d7 | 1,552 | exs | Elixir | mix.exs | jotaviobiondo/rajska | d678d501fed0e698d697bb8d58648451091ec1ff | [
"MIT"
] | null | null | null | mix.exs | jotaviobiondo/rajska | d678d501fed0e698d697bb8d58648451091ec1ff | [
"MIT"
] | null | null | null | mix.exs | jotaviobiondo/rajska | d678d501fed0e698d697bb8d58648451091ec1ff | [
"MIT"
] | null | null | null | defmodule Rajska.MixProject do
use Mix.Project
@github_url "https://github.com/jungsoft/rajska"
def project do
[
app: :rajska,
version: "1.2.0",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps(),
name: "Rajska",
source_url: @github_url,
description: "Rajska is an authorization library for Absinthe.",
package: package(),
elixirc_paths: elixirc_paths(Mix.env()),
aliases: aliases(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test,
"test.all": :test
]
]
end
def elixirc_paths(:test), do: ["lib", "test/support"]
def elixirc_paths(_), do: ["lib"]
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
files: ~w(lib mix.exs README* LICENSE*),
licenses: ["MIT"],
links: %{
"GitHub" => @github_url,
"Docs" => "https://hexdocs.pm/rajska/"
}
]
end
defp deps do
[
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:credo, "~> 1.5.0", only: [:dev, :test], runtime: false},
{:absinthe, "~> 1.4.0 or ~> 1.5.4 or ~> 1.6.0"},
{:excoveralls, "~> 0.11", only: :test},
{:hammer, "~> 6.0", optional: true},
{:mock, "~> 0.3.0", only: :test},
]
end
defp aliases do
[
"test.all": [
"credo --strict",
"test"
]
]
end
end
| 22.171429 | 70 | 0.521263 |
ffae159bf3d577864313f15732e69cdf07e5ee35 | 521 | ex | Elixir | hackerank_elixir/easy/split_strings.ex | eduardorasgado/ElixirWarlock | 1658ab2ffb2870cb81c8a434755d98678572838c | [
"MIT"
] | null | null | null | hackerank_elixir/easy/split_strings.ex | eduardorasgado/ElixirWarlock | 1658ab2ffb2870cb81c8a434755d98678572838c | [
"MIT"
] | 1 | 2021-03-10T05:09:49.000Z | 2021-03-10T05:09:49.000Z | hackerank_elixir/easy/split_strings.ex | eduardorasgado/ElixirWarlock | 1658ab2ffb2870cb81c8a434755d98678572838c | [
"MIT"
] | null | null | null | defmodule SplitStrings do
@moduledoc """
This problem can be found here:
https://www.codewars.com/kata/515de9ae9dcfc28eb6000001/train/elixir
"""
@doc """
solution("abc") # should return ["ab", "c_"]
solution("abcdef") # should return ["ab", "cd", "ef"]
"""
def solution(str_s) do
# Your code here
str_s <> "_"
|> String.graphemes
|> Enum.chunk(2, 2, :discard)
|> Enum.map(&Enum.join/1)
end
end
IO.inspect SplitStrings.solution("abc")
IO.inspect SplitStrings.solution("abcdef")
| 23.681818 | 71 | 0.642994 |
ffae1f87eaff225d317266c626fb62f946e365c0 | 997 | ex | Elixir | lib/opentelemetry_jaeger/span_ref_type_mapper.ex | sadesyllas/opentelemetry_jaeger | 5d2719b810717c6674838dd08ff76d22e470313d | [
"MIT"
] | null | null | null | lib/opentelemetry_jaeger/span_ref_type_mapper.ex | sadesyllas/opentelemetry_jaeger | 5d2719b810717c6674838dd08ff76d22e470313d | [
"MIT"
] | null | null | null | lib/opentelemetry_jaeger/span_ref_type_mapper.ex | sadesyllas/opentelemetry_jaeger | 5d2719b810717c6674838dd08ff76d22e470313d | [
"MIT"
] | null | null | null | defprotocol OpenTelemetryJaeger.SpanRefTypeMapper do
@moduledoc """
An implementor of this protocol is required to read a list of `:opentelemetry.attribute()`s and resolve a
`Jaeger.Thrift.SpanRefType`, if applicable.
"""
require Jaeger.Thrift.SpanRefType, as: SpanRefType
@type span_ref_type :: unquote(SpanRefType.child_of()) | unquote(SpanRefType.follows_from())
@doc """
Resolves a `Jaeger.Thrift.SpanRefType` based on a list of `:opentelemetry.attribute()`s.
"""
@spec resolve(struct(), :opentelemetry.attributes()) :: span_ref_type() | nil
def resolve(_, attributes)
end
defimpl OpenTelemetryJaeger.SpanRefTypeMapper, for: Any do
  require Jaeger.Thrift.SpanRefType, as: SpanRefType
  alias OpenTelemetryJaeger.SpanRefTypeMapper
  @doc """
  See `OpenTelemetryJaeger.SpanRefTypeMapper.resolve/2`.

  Fallback implementation: ignores the attributes and always resolves to
  `CHILD_OF`.
  """
  @spec resolve(struct(), :opentelemetry.attributes()) :: SpanRefTypeMapper.span_ref_type() | nil
  def resolve(_, _attributes), do: SpanRefType.child_of()
end
| 34.37931 | 107 | 0.751254 |
ffae328d480a36942b6d5437589fce798dc9fdc0 | 258 | ex | Elixir | lib/cowguest/controller.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | lib/cowguest/controller.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | lib/cowguest/controller.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | defmodule Cowguest.Controller do
@moduledoc false
  # Injects controller plumbing into the caller: imports this module's helpers
  # and turns the caller into a Plug.Builder pipeline (options are forwarded
  # to Plug.Builder). `location: :keep` keeps stacktraces pointing here.
  defmacro __using__(opts) do
    quote location: :keep do
      import Cowguest.Controller
      use Plug.Builder, unquote(opts)
    end
  end
  # Encodes `value` to JSON via Poison. NOTE(review): Poison.encode/1 returns
  # a tagged tuple ({:ok, json} | {:error, reason}), not the bare JSON string —
  # confirm callers unwrap it.
  def to_json(value) do
    Poison.encode(value)
  end
end
| 16.125 | 37 | 0.697674 |
ffae467fe0273ffc63324103531bbc66f702fe39 | 1,776 | ex | Elixir | clients/books/lib/google_api/books/v1/model/volumes.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/volumes.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/books/lib/google_api/books/v1/model/volumes.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.Volumes do
@moduledoc """
## Attributes
* `items` (*type:* `list(GoogleApi.Books.V1.Model.Volume.t)`, *default:* `nil`) - A list of volumes.
* `kind` (*type:* `String.t`, *default:* `nil`) - Resource type.
* `totalItems` (*type:* `integer()`, *default:* `nil`) - Total number of volumes found. This might be greater than the number of volumes returned in this response if results have been paginated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.Books.V1.Model.Volume.t()),
:kind => String.t(),
:totalItems => integer()
}
field(:items, as: GoogleApi.Books.V1.Model.Volume, type: :list)
field(:kind)
field(:totalItems)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Volumes do
def decode(value, options) do
GoogleApi.Books.V1.Model.Volumes.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Volumes do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.509434 | 198 | 0.704955 |
ffae524093f0d26606dee2b7a7956deaaf6de158 | 1,954 | ex | Elixir | src/elixir/irc/lib/server.ex | fmi-unibuc/iclp | 771541a768ddbfba15fa083df2f128e87a84b567 | [
"MIT"
] | null | null | null | src/elixir/irc/lib/server.ex | fmi-unibuc/iclp | 771541a768ddbfba15fa083df2f128e87a84b567 | [
"MIT"
] | null | null | null | src/elixir/irc/lib/server.ex | fmi-unibuc/iclp | 771541a768ddbfba15fa083df2f128e87a84b567 | [
"MIT"
] | null | null | null | defmodule Server do
@moduledoc """
A server maintaining the list of channels and users
"""
@doc """
"""
use GenServer, restart: :transient
  # Starts the chat server; `opts` are passed straight to GenServer
  # (e.g. name registration). Initial state is built in init/1.
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, opts)
  end
  # Registers `name` on `server`. Synchronous: replies {:registered, thread}
  # on success or :name_already_registered (see the handle_call below).
  def register(server, name) do
    GenServer.call(server, {:register, name})
  end
@impl true
@spec init(:ok) :: {:ok, ServerState.t()}
def init(:ok) do
{:ok, general} = Channel.start_link(:general, [])
{:ok, usersAgent} = Storage.start_link()
{:ok, channelsAgent} = Storage.start_link()
{:ok, userThreadsAgent} = Storage.start_link()
Storage.put!(channelsAgent, :general, general)
{:ok, %ServerState{
users: usersAgent,
channels: channelsAgent,
userThreads: userThreadsAgent
}}
end
  @impl true
  def handle_call({:register, name}, from, state) do
    # `from` is {caller_pid, tag}; keep the pid so the per-user thread can
    # reach the caller directly.
    {address, _tag} = from
    case Storage.put(state.users, name, address) do
      :ok ->
        {:ok, localChannels} = Storage.start_link()
        initState =
          %ServerThreadState{
            userName: name,
            userAddress: address,
            globalChannels: state.channels,
            userChannels: localChannels
          }
        {:ok, thread} = ServerThread.start(initState)
        # Reply early to unblock the caller, then finish the monitor setup;
        # :noreply is correct here because the reply was already sent.
        GenServer.reply(from, {:registered, thread})
        ref = Process.monitor(thread)
        # Record monitor-ref -> name so the :DOWN handler can clean up.
        Storage.put!(state.userThreads, ref, name)
        {:noreply, state}
      :not_ok ->
        # :not_ok — presumably the name is already taken; reject registration.
        {:reply, :name_already_registered, state}
    end
  end
@impl true
def handle_info({:DOWN, ref, :process, _thread, _reason}, state) do
case Storage.get(state.userThreads, ref) do
nil -> IO.puts :stderr, "[server]: invalid_reference #{ref}; ignored"
name ->
Storage.delete(state.userThreads, ref)
Storage.delete(state.users, name)
end
{:noreply, state}
end
  @impl true
  def handle_info(message, state) do
    # Catch-all: dump any unexpected message instead of crashing the server.
    IO.inspect message
    {:noreply, state}
  end
end
| 26.767123 | 75 | 0.620778 |
ffae6f309a78f5ce3b98305adecd06d568768bfb | 1,278 | ex | Elixir | lib/exhmac/noncer/noncer_server.ex | lizhaochao/ExHmac | 9e7e00999362107e17528d6d9af97da7f461d6a9 | [
"MIT"
] | null | null | null | lib/exhmac/noncer/noncer_server.ex | lizhaochao/ExHmac | 9e7e00999362107e17528d6d9af97da7f461d6a9 | [
"MIT"
] | null | null | null | lib/exhmac/noncer/noncer_server.ex | lizhaochao/ExHmac | 9e7e00999362107e17528d6d9af97da7f461d6a9 | [
"MIT"
] | null | null | null | defmodule ExHmac.Noncer.Server do
@moduledoc false
### Use GenServer To Make Sure Operations Is Atomic.
use GenServer
alias ExHmac.{Config, Noncer}
alias ExHmac.Noncer.GarbageCollector, as: GC
@gc_interval_milli Config.get_gc_interval_milli()
def start_link(opts) when is_list(opts) do
with(
impl_m <- __MODULE__,
repo_name <- impl_m,
name_opt <- [name: repo_name]
) do
GenServer.start_link(impl_m, :ok, opts ++ name_opt)
end
end
  @impl true
  # Arms the first garbage-collection timer; the server itself keeps no
  # state (nil) — nonce data lives in the Noncer/Storage layer.
  def init(:ok) do
    gc_timer_fire()
    {:ok, nil}
  end
@impl true
def handle_info(:collect, state) do
with(
_ <- GC.collect(),
_ <- gc_timer_fire()
) do
{:noreply, state}
end
end
@impl true
def handle_info(_, state), do: {:ok, state}
  @impl true
  # Synchronous nonce check. Serialized through this GenServer so two
  # concurrent requests cannot race on the same nonce.
  def handle_call({nonce, curr_ts, freezing_secs, precision}, _from, state) do
    result = Noncer.check(nonce, curr_ts, freezing_secs, precision)
    {:reply, result, state}
  end
  @impl true
  # Fire-and-forget persistence of nonce metadata; the caller does not wait.
  def handle_cast({:save_meta, raw_result, nonce, arrived_at, curr_ts, precision}, state) do
    Noncer.save_meta(raw_result, nonce, arrived_at, curr_ts, precision)
    {:noreply, state}
  end
  # Arms a one-shot timer that delivers :collect to this server after the
  # configured GC interval; re-armed from the :collect handler.
  def gc_timer_fire do
    Process.send_after(self(), :collect, @gc_interval_milli)
  end
end
| 22.421053 | 92 | 0.668232 |
ffae75407e58f17d28e4079698afa5b7cedd43bb | 86 | ex | Elixir | web/views/order_view.ex | the5fire/ex_admin_demo | 655540499a68670c7349974b47c5e0bfee29aa99 | [
"MIT"
] | 22 | 2016-03-31T02:58:09.000Z | 2020-06-16T02:37:16.000Z | web/views/order_view.ex | the5fire/ex_admin_demo | 655540499a68670c7349974b47c5e0bfee29aa99 | [
"MIT"
] | 7 | 2016-03-27T14:22:59.000Z | 2021-04-27T14:33:51.000Z | web/views/order_view.ex | the5fire/ex_admin_demo | 655540499a68670c7349974b47c5e0bfee29aa99 | [
"MIT"
] | 13 | 2016-04-21T06:05:32.000Z | 2018-08-23T06:38:54.000Z | defmodule ExAdminDemo.OrderView do
  # Standard ExAdmin view module: pull in the app's view helpers and the
  # Xain HTML-generation macros.
  use ExAdminDemo.Web, :view
  require Xain
end
| 14.333333 | 34 | 0.767442 |
ffae939e88257663362e40e17430eb1577714e85 | 720 | exs | Elixir | jwt_app/mix.exs | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | 1 | 2019-03-28T09:08:16.000Z | 2019-03-28T09:08:16.000Z | jwt_app/mix.exs | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | null | null | null | jwt_app/mix.exs | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | null | null | null | defmodule JwtApp.Mixfile do
use Mix.Project
  # Mix project definition: app name, version and build flags.
  # NOTE(review): `Mix.env == :prod` is the pre-Elixir-1.4 call style
  # (now `Mix.env()`); kept as-is to match the project's Elixir ~> 1.3.
  def project do
    [app: :jwt_app,
     version: "0.1.0",
     elixir: "~> 1.3",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps()]
  end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[ mod: {JwtApp, []},applications: [:guardian]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:guardian, "~> 1.0"},
]
end
end
| 20.571429 | 77 | 0.591667 |
ffaee73c8e0d07ef7e556c48b13e1acf282a3fa4 | 3,859 | ex | Elixir | apps/omg_api/lib/fee_server.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/fee_server.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/fee_server.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.API.FeeServer do
  @moduledoc """
  Maintains current fee rates and acceptable tokens.
  Updates fees information from external source.
  Provides function to validate transaction's fee.
  """
  alias OMG.API.Fees
  use GenServer
  use OMG.API.LoggerExt
  # How often (ms) the fee-spec file is re-checked for changes.
  @file_changed_check_interval_ms 10_000
  def start_link(_args) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end
  # Initializes the ETS fee store. If :ignore_fees is set, a sentinel
  # :ignore spec is stored; otherwise the spec file is loaded once and a
  # periodic re-check timer is started.
  # NOTE(review): `:ok = update_fee_spec()` means init crashes if the fee
  # file is missing/invalid — presumably intentional fail-fast; confirm.
  def init(args) do
    :ok = ensure_ets_init()
    _ =
      case Application.get_env(:omg_api, :ignore_fees) do
        true ->
          :ok = save_fees(:ignore, 0)
          _ = Logger.info("fee specs from file is ignore")
        opt when is_nil(opt) or is_boolean(opt) ->
          :ok = update_fee_spec()
          {:ok, _} = :timer.apply_interval(@file_changed_check_interval_ms, __MODULE__, :update_fee_spec, [])
      end
    _ = Logger.info("Started FeeServer")
    {:ok, args}
  end
  @doc """
  Returns accepted tokens and amounts in which transaction fees are collected
  """
  @spec transaction_fees() :: {:ok, Fees.fee_t()}
  def transaction_fees do
    {:ok, load_fees()}
  end
  @doc """
  Reads fee specification file if needed and updates :ets state with current fees information
  """
  @spec update_fee_spec() :: :ok
  # NOTE(review): the `:ok =` match below means the {:error, ...} values
  # produced in `else` are never returned — they raise MatchError instead
  # (let-it-crash). The @spec return of :ok reflects that only success
  # returns; confirm the crash-on-error behavior is intended.
  def update_fee_spec do
    path = Application.fetch_env!(:omg_api, :fee_specs_file_path)
    :ok =
      with {:reload, changed_at} <- should_load_file(path),
           {:ok, content} <- File.read(path),
           {:ok, specs} <- Fees.parse_file_content(content) do
        :ok = save_fees(specs, changed_at)
        _ = Logger.info("Reloaded #{inspect(Enum.count(specs))} fee specs from file, changed at #{inspect(changed_at)}")
        :ok
      else
        {:file_unchanged, _last_change_at} ->
          :ok
        {:error, :enoent} ->
          _ = Logger.error("The fee specification file #{inspect(path)} not found in #{System.get_env("PWD")}")
          {:error, :fee_spec_not_found}
        error ->
          _ = Logger.warn("Unable to update fees from file. Reason: #{inspect(error)}")
          error
      end
  end
  # Atomically stores the fee specs and the load timestamp in ETS.
  defp save_fees(fee_specs, loaded_at) do
    true = :ets.insert(:fees_bucket, [{:last_loaded, loaded_at}, {:fees, fee_specs}])
    :ok
  end
  defp load_fees, do: :ets.lookup_element(:fees_bucket, :fees, 2)
  # Compares the file's mtime with the last load time to decide on a reload.
  defp should_load_file(path) do
    loaded = get_last_loaded_file_timestamp()
    changed = get_file_last_modified_timestamp(path)
    if changed > loaded,
      do: {:reload, changed},
      else: {:file_unchanged, loaded}
  end
  defp get_last_loaded_file_timestamp do
    [{:last_loaded, timestamp}] = :ets.lookup(:fees_bucket, :last_loaded)
    # When not matched we prefer immediate crash here as this should never happened
    timestamp
  end
  defp get_file_last_modified_timestamp(path) do
    case File.stat(path, time: :posix) do
      {:ok, %File.Stat{mtime: mtime}} ->
        mtime
      # possibly wrong path - returns current timestamp to force file reload where file errors are handled
      _ ->
        :os.system_time(:second)
    end
  end
  # Creates the named public ETS table if needed and resets the load marker.
  defp ensure_ets_init do
    _ = if :undefined == :ets.info(:fees_bucket), do: :ets.new(:fees_bucket, [:set, :public, :named_table])
    true = :ets.insert(:fees_bucket, {:last_loaded, 0})
    :ok
  end
end
| 29.684615 | 120 | 0.66779 |
ffaf04efc3d7e1fdccc73b50c240e3b8e10af885 | 2,288 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__explicit_content_annotation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__explicit_content_annotation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p3beta1__explicit_content_annotation.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation do
  @moduledoc """
  Explicit content annotation (based on per-frame visual signals only). If no explicit content has been detected in a frame, no annotations are present for that frame.

  ## Attributes

  *   `frames` (*type:* `list(GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame.t)`, *default:* `nil`) - All video frames where explicit content was detected.
  *   `version` (*type:* `String.t`, *default:* `nil`) - Feature version.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :frames =>
            list(
              GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame.t()
            ),
          :version => String.t()
        }

  # Generated field declarations (ModelBase macro): map JSON keys to struct fields.
  field(:frames,
    as:
      GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentFrame,
    type: :list
  )

  field(:version)
end
# Generated Poison decoder: delegates to the ModelBase-provided decode/2.
defimpl Poison.Decoder,
  for:
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation do
  def decode(value, options) do
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation.decode(
      value,
      options
    )
  end
end
# Generated Poison encoder: delegates to the shared ModelBase encoder.
defimpl Poison.Encoder,
  for:
    GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p3beta1_ExplicitContentAnnotation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.2 | 204 | 0.751311 |
ffaf2344091599cf28cf44a73479815e0b2ea039 | 5,630 | ex | Elixir | Meta-Interpreter.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | Meta-Interpreter.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | Meta-Interpreter.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | defmodule Env do
def new() do
[]
end
def add(id, str, env) do
[{id,str}|env]
end
def lookup(id, env) do
case env do
[] ->
nil
[{^id, str}|t] ->
{id,str}
[h|t] ->
lookup(id, t)
end
end
def remove(ids, env) do
case ids do
[] ->
env
[id|t] ->
remove(t, rem_(id, env))
end
end
def rem_(id, []) do [] end
def rem_(id, [{id, _}|t]) do
t
end
def rem_(id, [h|t]) do
[h|rem_(id, t)]
end
  # Builds the closure environment for `ids`: the list of their current
  # bindings, or :error if any id is unbound.
  # NOTE(review): clause order makes `closure([], [])` return :error rather
  # than [] — the empty-environment clause wins; confirm this is intended.
  def closure(_, []) do :error end
  def closure([], _) do [] end
  def closure([id|t], env) do
    case Env.lookup(id, env) do
      nil ->
        :error
      str ->
        [str| closure(t, env)]
    end
  end
def args([], [], env) do env end
def args([var|par], [str| args], env) do
args(par, args, add(var, str, env))
end
def args(_, _,_) do :error end
end
defmodule Eager do
def eval_expr({:atm,id}, _, _) do {:ok, id} end
def eval_expr({:var, id}, env, _) do
case Env.lookup(id, env) do
nil ->
:error
{_, str} ->
{:ok, str}
end
end
def eval_expr({:cons, head, tail}, env, prg) do
case eval_expr(head, env, prg) do
:error ->
:error
{:ok, id} ->
case eval_expr(tail, env, prg) do
:error ->
:error
{:ok, ts} ->
{:ok, [id|ts]}
end
end
end
def eval_expr({:case, expr, cls}, env, prg) do
case eval_expr(expr, env, prg) do
:error ->
:error
{:ok, str} ->
eval_cls(cls, str, env, prg)
end
end
def eval_expr({:lambda, par, free, seq}, env, _) do
case Env.closure(free, env) do
:error ->
:error
closure ->
{:ok, {:closure, par, seq, closure}}
end
end
def eval_expr({:apply, expr, args}, env, prg) do
case eval_expr(expr, env, args) do
:error ->
:error
{:ok, {:closure, par, seq, closure}} ->
case eval_args(args, env, prg) do
:error ->
:error
strs ->
env = Env.args(par, strs, closure)
eval_seq(seq, env, prg)
end
end
end
def eval_expr({:call, id, args}, env, prg) when is_atom(id) do
case List.keyfind(prg, id, 0) do
nil ->
:error
{_, par, seq} ->
case eval_args(args,env, prg) do
:error ->
:error
strs ->
env= Env.args(par,strs, [])
eval_seq(seq,env,prg)
end
end
end
  # Evaluates each argument expression in order, collecting the values.
  # NOTE(review): only a failure of the FIRST remaining element yields the
  # bare atom :error; a failure deeper in the list produces an improper
  # list such as [v | :error], which callers' `:error ->` branches will not
  # match — confirm whether that is intended.
  def eval_args([], _, _) do [] end
  def eval_args([atm|t], env, prg) do
    case eval_expr(atm, env, prg) do
      :error ->
        :error
      {:ok, str} ->
        [str|eval_args(t, env, prg)]
    end
  end
def eval_cls([], _,_, _) do :error end
def eval_cls([{:clause, ptr, seq}| cls], str, env, prg) do
case eval_match(ptr,str, env) do
:fail ->
eval_cls(cls,str,env, prg)
{:ok, env} ->
eval_seq(seq,env, prg)
end
end
  # Matches a pattern against a value, threading the environment.
  # Returns {:ok, extended_env} or :fail. Clause order matters: the
  # wildcard fallthrough at the end catches everything else.
  def eval_match(:ignore, _, env) do {:ok, env} end
  # Atom pattern matches only the identical atom (note the repeated `id`).
  def eval_match({:atm, id}, id, env) do {:ok, env} end
  # Variable pattern: bind if unbound, succeed if bound to the same value,
  # fail if bound to a different value (pin semantics via ^str).
  def eval_match({:var, id}, str, env) do
    case Env.lookup(id, env) do
      nil ->
        {:ok, Env.add(id, str, env)}
      {_, ^str} ->
        {:ok, env}
      {_, _} ->
        :fail
    end
  end
  # Cons pattern: match head then tail with the head's extended environment.
  def eval_match({:cons, hp, tp}, [hs|ts], env) do
    case eval_match(hp, hs, env) do
      :fail ->
        :fail
      {:ok, env1} ->
        eval_match(tp, ts, env1)
    end
  end
  def eval_match(_, _, _) do
    :fail
  end
def eval(seq) do
eval_seq(seq, Env.new(), [])
end
  # Evaluates a sequence: the final expression's value is the result.
  def eval_seq([exp], env, prg) do
    eval_expr(exp, env, prg)
  end
  # A match step: evaluate the right-hand side, then rebind the pattern's
  # variables. Existing bindings for those variables are removed first so
  # the pattern re-binds them fresh (shadowing semantics).
  def eval_seq([{:match, patr, td}| t], env, prg) do
    case eval_expr(td, env, prg) do
      :error ->
        :error
      {:ok, str} ->
        vars = extract_vars(patr)
        env = Env.remove(vars, env)
        case eval_match(patr, str,env) do
          :fail ->
            :error
          {:ok, env} ->
            eval_seq(t, env, prg)
        end
    end
  end
def extract_vars(:ignore) do [] end
def extract_vars({:var, id}) do
[id]
end
def extract_vars({:cons, ts, td}) do
extract_vars(ts) ++ extract_vars(td)
end
  # Fixture program: a single `append/2` function written in the
  # interpreter's AST — recursive list append via a case on the first list.
  def pgrm do
    [{:append, [:x, :y],
      [{:case, {:var, :x},
        [{:clause, {:atm, []}, [{:var, :y}]},
         {:clause, {:cons, {:var, :hd}, {:var, :tl}},
          [{:cons,
            {:var, :hd},
            {:call, :append, [{:var, :tl}, {:var, :y}]}}]
         }]
       }]
     }]
  end
  # Fixture sequence: binds x = [:a, :b] and y = [:c, :d], then calls
  # append(x, y) — expected result [:a, :b, :c, :d].
  def seq do
    [{:match, {:var, :x},
      {:cons, {:atm, :a}, {:cons, {:atm, :b}, {:atm, []}}}},
     {:match, {:var, :y},
      {:cons, {:atm, :c}, {:cons, {:atm, :d}, {:atm, []}}}},
     {:call, :append, [{:var, :x}, {:var, :y}]}
    ]
  end
end | 24.267241 | 66 | 0.397869 |
ffaf68362ce47db7753fdd6da7c061a1f2368634 | 186 | exs | Elixir | time_tracker_backend/priv/repo/migrations/20160818200407_add_users.exs | knewter/time-tracker | 1f58031112a24c26a1a54ac33105b4430a04e954 | [
"MIT"
] | 382 | 2016-08-18T07:34:27.000Z | 2021-02-25T20:46:34.000Z | time_tracker_backend/priv/repo/migrations/20160818200407_add_users.exs | knewter/time-tracker | 1f58031112a24c26a1a54ac33105b4430a04e954 | [
"MIT"
] | 1 | 2017-09-30T00:01:26.000Z | 2017-09-30T00:01:26.000Z | time_tracker_backend/priv/repo/migrations/20160818200407_add_users.exs | knewter/time-tracker | 1f58031112a24c26a1a54ac33105b4430a04e954 | [
"MIT"
] | 45 | 2016-08-30T07:34:04.000Z | 2020-01-27T11:39:26.000Z | defmodule TimeTrackerBackend.Repo.Migrations.AddUsers do
use Ecto.Migration
  # Reversible migration: minimal users table with a name column plus the
  # standard inserted_at/updated_at timestamps.
  def change do
    create table(:users) do
      add :name, :string
      timestamps()
    end
  end
end
| 15.5 | 56 | 0.688172 |
ffaf8b0de17de0813eb8f6eb372167497e36a416 | 940 | exs | Elixir | mix.exs | DanilaMihailov/docs_getter | 82f95bc60a885ea1ec778e218a9b6d06e8ee05c5 | [
"Apache-2.0"
] | null | null | null | mix.exs | DanilaMihailov/docs_getter | 82f95bc60a885ea1ec778e218a9b6d06e8ee05c5 | [
"Apache-2.0"
] | null | null | null | mix.exs | DanilaMihailov/docs_getter | 82f95bc60a885ea1ec778e218a9b6d06e8ee05c5 | [
"Apache-2.0"
] | null | null | null | defmodule DocsGetter.MixProject do
use Mix.Project
  # Mix project definition, including Hex package and ExDoc metadata.
  def project do
    [
      app: :docs_getter,
      version: "0.1.0-beta.1",
      source_url: "https://github.com/DanilaMihailov/docs_getter",
      homepage_url: "https://github.com/DanilaMihailov/docs_getter",
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      docs: docs(),
      description: description(),
      package: package()
    ]
  end
def application do
[]
end
defp description() do
"Builds docs for all dependencies"
end
  # Hex package metadata: license and repository link.
  defp package() do
    [
      licenses: ["Apache-2.0"],
      links: %{"GitHub" => "https://github.com/DanilaMihailov/docs_getter"}
    ]
  end
  # Dependencies: ex_doc only, and only for dev (doc generation).
  defp deps do
    [
      {:ex_doc, "~> 0.21", only: :dev, runtime: false}
    ]
  end
  # ExDoc configuration: use the README as the landing page and hide the
  # auto-generated API reference index.
  defp docs do
    [
      api_reference: false,
      main: "readme",
      extras: [
        "README.md": [title: "README"]
      ]
    ]
  end
end
| 18.8 | 75 | 0.567021 |
ffafa181b56f924ef2db3ffe1a1e4fc918356c34 | 718 | exs | Elixir | config/test.exs | hammoc-app/hammoc-elixir | 00cd24e2170cc3dee65c7848868ea3d1096ac25c | [
"MIT"
] | 5 | 2019-07-13T22:20:50.000Z | 2020-07-13T05:05:43.000Z | config/test.exs | hammoc-app/hammoc-elixir | 00cd24e2170cc3dee65c7848868ea3d1096ac25c | [
"MIT"
] | 33 | 2019-08-01T03:48:23.000Z | 2021-07-28T03:29:40.000Z | config/test.exs | hammoc-app/hammoc-elixir | 00cd24e2170cc3dee65c7848868ea3d1096ac25c | [
"MIT"
] | 2 | 2019-09-03T22:45:11.000Z | 2020-01-01T23:56:58.000Z | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :hammoc, HammocWeb.Endpoint,
  http: [port: 4002],
  server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
# NOTE(review): `port` is given as the string "5432"; database adapters
# typically expect an integer port — confirm the string form is accepted.
config :hammoc, Hammoc.Repo,
  username: "postgres",
  password: "postgres",
  database: "hammoc_test",
  hostname: "localhost",
  port: "5432",
  pool: Ecto.Adapters.SQL.Sandbox
# Swap the retriever client for a remotely-controllable test double.
config :hammoc, Hammoc.Retriever, client_module: Hammoc.Retriever.Client.RemoteControlled
# Stub the Twitter OAuth strategy so tests never hit the real provider.
config :ueberauth, Ueberauth, providers: [twitter: {Test.Support.Stubs.UeberauthStrategy, []}]
config :phoenix_integration,
  endpoint: HammocWeb.Endpoint
| 26.592593 | 94 | 0.750696 |
ffafa43bda470a98d411b34ef3d01dcc2d79b1d9 | 1,678 | exs | Elixir | config/test.exs | onomated/coherence | 210c6e56a48f53b6a02a6f99a6b5260027ae6633 | [
"MIT"
] | 1,347 | 2016-07-04T23:20:10.000Z | 2022-02-10T20:10:48.000Z | config/test.exs | onomated/coherence | 210c6e56a48f53b6a02a6f99a6b5260027ae6633 | [
"MIT"
] | 378 | 2016-07-06T16:30:28.000Z | 2021-09-16T13:34:05.000Z | config/test.exs | onomated/coherence | 210c6e56a48f53b6a02a6f99a6b5260027ae6633 | [
"MIT"
] | 276 | 2016-07-06T20:26:17.000Z | 2021-12-06T19:32:41.000Z | use Mix.Config
# config :coherence, ecto_repos: [TestCoherence.Repo]
config :logger, level: :error
# Test endpoint: no HTTP server is started during tests.
config :coherence, TestCoherenceWeb.Endpoint,
  http: [port: 4001],
  secret_key_base: "HL0pikQMxNSA58Dv4mf26O/eh1e4vaJDmX0qLgqBcnS94gbKu9Xn3x114D+mHYcX",
  server: false
config :coherence, ecto_repos: [TestCoherence.Repo]
# Test database; credentials can be overridden via DB_* env vars.
config :coherence, TestCoherence.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: System.get_env("DB_USERNAME") || "postgres",
  password: System.get_env("DB_PASSWORD") || "postgres",
  database: "coherence_test",
  hostname: System.get_env("DB_HOSTNAME") || "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
# Coherence test configuration: enables every auth option so the full
# feature set is exercised by the test suite.
config :coherence,
  user_schema: TestCoherence.User,
  password_hashing_alg: Comeonin.Bcrypt,
  repo: TestCoherence.Repo,
  router: TestCoherenceWeb.Router,
  module: TestCoherence,
  web_module: TestCoherenceWeb,
  layout: {Coherence.LayoutView, :app},
  messages_backend: TestCoherenceWeb.Coherence.Messages,
  logged_out_url: "/",
  email_from_name: "Your Name",
  email_from_email: "yourname@example.com",
  opts: [
    :confirmable,
    :authenticatable,
    :recoverable,
    :lockable,
    :trackable,
    :unlockable_with_token,
    :invitable,
    :registerable,
    :rememberable
  ],
  registration_permitted_attributes: [
    "email",
    "name",
    "password",
    "password_confirmation",
    "current_password"
  ],
  invitation_permitted_attributes: ["name", "email"],
  password_reset_permitted_attributes: [
    "reset_password_token",
    "password",
    "password_confirmation"
  ],
  session_permitted_attributes: ["remember", "email", "password"],
  confirm_email_updates: true
# Low bcrypt cost for fast test runs (never use in production).
config :bcrypt_elixir, log_rounds: 4
| 27.064516 | 86 | 0.733611 |
ffafab54b182f401eca902fcf493e10ea781e8a6 | 24,367 | exs | Elixir | lib/ex_unit/test/ex_unit/diff_test.exs | fedora-erlang/elixir | 84c1044164a3e99b4a7d155024eb1130ab5d3377 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/diff_test.exs | fedora-erlang/elixir | 84c1044164a3e99b4a7d155024eb1130ab5d3377 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/diff_test.exs | fedora-erlang/elixir | 84c1044164a3e99b4a7d155024eb1130ab5d3377 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.DiffTest do
use ExUnit.Case, async: true
alias Inspect.Algebra
alias ExUnit.{Assertions, Diff}
  # Fixture struct with two optional fields, used throughout the diff tests.
  defmodule User do
    defstruct [:name, :age]
  end
  # Fixture struct sharing the :age field with User, for cross-struct diffs.
  defmodule Person do
    defstruct [:age]
  end
  # Fixture struct whose Inspect implementation hides its contents, used to
  # test diffing of opaque (non-introspectable) values.
  defmodule Opaque do
    defstruct [:data]
    defimpl Inspect do
      def inspect(_, _) do
        "#Opaque<???>"
      end
    end
  end
  # Compile-time fixtures: macros expanding to literal values/patterns so
  # tests can exercise diffing of macro-generated ASTs.
  defmacrop one, do: 1
  defmacrop tuple(a, b) do
    quote do
      {unquote(a), unquote(b)}
    end
  end
  # Expands to a pin of the caller's `x` variable (i.e. `^x`).
  defmacrop pin_x do
    x = Macro.var(:x, nil)
    quote(do: ^unquote(x))
  end
  # Test helpers. `assert_diff` asserts that a match (`=`, with pins) or a
  # comparison (`==`) diffs cleanly with the given binding; `refute_diff`
  # asserts the diff fails and renders the given left/right formatted output.
  # Each macro head dispatches on the operator of the quoted expression and
  # rewrites it into a call to the 4/5-arity runtime helper.
  defmacrop assert_diff(expr, expected_binding, pins \\ [])
  # `left = right` form: expand the pattern like ExUnit.Assertions does,
  # then check it in :match mode with the supplied pins.
  defmacrop assert_diff({:=, _, [left, right]}, expected_binding, pins) do
    left = Assertions.__expand_pattern__(left, __CALLER__) |> Macro.escape()
    quote do
      assert_diff(
        unquote(left),
        unquote(right),
        unquote(expected_binding),
        {:match, unquote(pins)}
      )
    end
  end
  # `left == right` form: plain expression mode, no binding or pins.
  defmacrop assert_diff({:==, _, [left, right]}, [], []) do
    quote do
      assert_diff(unquote(left), unquote(right), [], :expr)
    end
  end
  defmacrop refute_diff(expr, expected_left, expected_right, pins \\ [])
  # `left = right` form: expect the match to fail and produce the given
  # formatted left/right diff strings.
  defmacrop refute_diff({:=, _, [left, right]}, expected_left, expected_right, pins) do
    left = Assertions.__expand_pattern__(left, __CALLER__) |> Macro.escape()
    quote do
      refute_diff(
        unquote(left),
        unquote(right),
        unquote(expected_left),
        unquote(expected_right),
        {:match, unquote(pins)}
      )
    end
  end
  # `left == right` form: expression-mode failure expectation.
  defmacrop refute_diff({:==, _, [left, right]}, expected_left, expected_right, []) do
    quote do
      refute_diff(
        unquote(left),
        unquote(right),
        unquote(expected_left),
        unquote(expected_right),
        :expr
      )
    end
  end
test "atoms" do
assert_diff(:a = :a, [])
assert_diff(:a = :a, [])
assert_diff(:"$a" = :"$a", [])
refute_diff(:a = :b, "-:a-", "+:b+")
refute_diff(:a = :aa, "-:a-", "+:aa+")
refute_diff(:"$" = :"$a", ~s[-:"$"-], ~s[+:"$a"+])
refute_diff(:"$a" = :"$b", ~s[-:"$a"-], ~s[+:"$b"+])
refute_diff(:bar = 42, "-:bar-", "+42+")
refute_diff(42 = :bar, "-42-", "+:bar+")
pins = [a: :a, b: :b]
assert_diff(x = :a, [x: :a], pins)
assert_diff(^a = :a, [], pins)
assert_diff(^b = :b, [], pins)
refute_diff(^a = :b, "-^a-", "+:b+", pins)
refute_diff(^b = :a, "-^b-", "+:a+", pins)
end
test "integers" do
assert_diff(123 = 123, [])
assert_diff(-123 = -123, [])
assert_diff(123 = +123, [])
assert_diff(+123 = 123, [])
refute_diff(12 = 13, "1-2-", "1+3+")
refute_diff(12345 = 123, "123-45-", "123")
refute_diff(123 = 12345, "123", "123+45+")
refute_diff(12345 = 345, "-12-345", "345")
refute_diff(345 = 12345, "345", "+12+345")
refute_diff(123 = -123, "123", "+-+123")
refute_diff(-123 = 123, "---123", "123")
refute_diff(491_512_235 = 490_512_035, "49-1-512-2-35", "49+0+512+0+35")
assert_diff(0xF = 15, [])
refute_diff(0xF = 16, "1-5-", "1+6+")
refute_diff(123 = :a, "-123-", "+:a+")
end
test "floats" do
assert_diff(123.0 = 123.0, [])
assert_diff(-123.0 = -123.0, [])
assert_diff(123.0 = +123.0, [])
assert_diff(+123.0 = 123.0, [])
refute_diff(1.2 = 1.3, "1.-2-", "1.+3+")
refute_diff(12.345 = 12.3, "12.3-45-", "12.3")
refute_diff(12.3 = 12.345, "12.3", "12.3+45+")
refute_diff(123.45 = 3.45, "-12-3.45", "3.45")
refute_diff(3.45 = 123.45, "3.45", "+12+3.45")
refute_diff(1.23 = -1.23, "1.23", "+-+1.23")
refute_diff(-1.23 = 1.23, "---1.23", "1.23")
refute_diff(123.0 = :a, "-123.0-", "+:a+")
refute_diff(123.0 = 123_512_235, "-123.0-", "+123512235+")
end
test "lists" do
assert_diff([] = [], [])
assert_diff([:a] = [:a], [])
assert_diff([:a, :b, :c] = [:a, :b, :c], [])
refute_diff([] = [:a], "[]", "[+:a+]")
refute_diff([:a] = [], "[-:a-]", "[]")
refute_diff([:a] = [:b], "[-:a-]", "[+:b+]")
refute_diff([:a, :b, :c] = [:a, :b, :x], "[:a, :b, -:c-]", "[:a, :b, +:x+]")
refute_diff([:a, :x, :c] = [:a, :b, :c], "[:a, -:x-, :c]", "[:a, +:b+, :c]")
refute_diff([:a, :d, :b, :c] = [:a, :b, :c, :d], "[:a, -:d-, :b, :c]", "[:a, :b, :c, +:d+]")
refute_diff([:a, :b, :c] = [:a, :b, []], "[:a, :b, -:c-]", "[:a, :b, +[]+]")
refute_diff([:a, :b, []] = [:a, :b, :c], "[:a, :b, -[]-]", "[:a, :b, +:c+]")
refute_diff([:a, :b, :c] = [:a, :b], "[:a, :b, -:c-]", "[:a, :b]")
refute_diff([:a, :b] = [:a, :b, :c], "[:a, :b]", "[:a, :b, +:c+]")
refute_diff([:a, :b, :c, :d, :e] = [:a, :b], "[:a, :b, -:c-, -:d-, -:e-]", "[:a, :b]")
refute_diff([:a, :b] = [:a, :b, :c, :d, :e], "[:a, :b]", "[:a, :b, +:c+, +:d+, +:e+]")
refute_diff(
[:a, [:d, :b, :c]] = [:a, [:b, :c, :d]],
"[:a, [-:d-, :b, :c]]",
"[:a, [:b, :c, +:d+]]"
)
refute_diff(
[:e, :a, :b, :c, :d] = [:a, :b, :c, :d, :e],
"[-:e-, :a, :b, :c, :d]",
"[:a, :b, :c, :d, +:e+]"
)
refute_diff([:a, [:c, :b]] = [:a, [:b, :c]], "[:a, [-:c-, :b]]", "[:a, [:b, +:c+]]")
refute_diff(:a = [:a, [:b, :c]], "-:a-", "+[:a, [:b, :c]]+")
pins = [a: :a, b: :b, list_ab: [:a, :b]]
assert_diff(x = [], [x: []], pins)
assert_diff(x = [:a, :b], [x: [:a, :b]], pins)
assert_diff([x] = [:a], [x: :a], pins)
assert_diff([x, :b, :c] = [:a, :b, :c], [x: :a], pins)
assert_diff([x, y, z] = [:a, :b, :c], [x: :a, y: :b, z: :c], pins)
assert_diff([x, x, :c] = [:a, :a, :c], [x: :a], pins)
refute_diff([x] = [], "[-x-]", "[]")
refute_diff([x, :b, :c] = [:a, :b, :x], "[x, :b, -:c-]", "[:a, :b, +:x+]")
refute_diff([x, x, :c] = [:a, :b, :c], "[x, -x-, :c]", "[:a, +:b+, :c]")
assert_diff(^list_ab = [:a, :b], [], pins)
assert_diff([^a, :b, :c] = [:a, :b, :c], [], pins)
assert_diff([^a, ^b, :c] = [:a, :b, :c], [], pins)
assert_diff([^a, a, :c] = [:a, :b, :c], [a: :b], pins)
assert_diff([b, ^b, :c] = [:a, :b, :c], [b: :a], pins)
refute_diff(^list_ab = [:x, :b], "-^list_ab-", "[+:x+, :b]", pins)
refute_diff([^a, :b, :c] = [:a, :b, :x], "[^a, :b, -:c-]", "[:a, :b, +:x+]", pins)
refute_diff([:a, ^a, :c] = [:a, :b, :c], "[:a, -^a-, :c]", "[:a, +:b+, :c]", pins)
refute_diff(
[x, :a, :b, :c, :d] = [:a, :b, :c, :d, :e],
"[x, -:a-, :b, :c, :d]",
"[:a, :b, :c, :d, +:e+]"
)
refute_diff([:a, :b] = :a, "-[:a, :b]-", "+:a+")
end
test "improper lists" do
assert_diff([:a | :b] = [:a | :b], [])
assert_diff([:a, :b | :c] = [:a, :b | :c], [])
refute_diff([:a | :b] = [:b | :a], "[-:a- | -:b-]", "[+:b+ | +:a+]")
refute_diff([:a | :b] = [:a | :x], "[:a | -:b-]", "[:a | +:x+]")
refute_diff([:a, :b | :c] = [:a, :b | :x], "[:a, :b | -:c-]", "[:a, :b | +:x+]")
refute_diff([:a, :x | :c] = [:a, :b | :c], "[:a, -:x- | :c]", "[:a, +:b+ | :c]")
refute_diff([:x, :b | :c] = [:a, :b | :c], "[-:x-, :b | :c]", "[+:a+, :b | :c]")
refute_diff([:c, :a | :b] = [:a, :b | :c], "[-:c-, :a | -:b-]", "[:a, +:b+ | +:c+]")
refute_diff(
[:a, :c, :x | :b] = [:a, :b, :c | :d],
"[:a, :c, -:x- | -:b-]",
"[:a, +:b+, :c | +:d+]"
)
refute_diff([:a | :d] = [:a, :b, :c | :d], "[:a | :d]", "[:a, +:b+, +:c+ | :d]")
refute_diff(
[[:a | :x], :x | :d] = [[:a | :b], :c | :d],
"[[:a | -:x-], -:x- | :d]",
"[[:a | +:b+], +:c+ | :d]"
)
assert_diff([:a | x] = [:a | :b], x: :b)
end
test "proper lists" do
assert_diff([:a | [:b]] = [:a, :b], [])
assert_diff([:a | [:b, :c]] = [:a, :b, :c], [])
refute_diff([:a | [:b]] = [:a, :x], "[:a | [-:b-]]", "[:a, +:x+]")
refute_diff([:a, :b | [:c]] = [:a, :b, :x], "[:a, :b | [-:c-]]", "[:a, :b, +:x+]")
refute_diff([:a, :x | [:c]] = [:a, :b, :c], "[:a, -:x- | [:c]]", "[:a, +:b+, :c]")
refute_diff([:a | [:b, :c]] = [:a, :b, :x], "[:a | [:b, -:c-]]", "[:a, :b, +:x+]")
refute_diff([:a | [:b, :c]] = [:x, :b, :c], "[-:a- | [:b, :c]]", "[+:x+, :b, :c]")
refute_diff(
[:a, :c, :x | [:b, :c]] = [:a, :b, :c, :d, :e],
"[:a, -:c-, -:x- | [:b, :c]]",
"[:a, :b, :c, +:d+, +:e+]"
)
refute_diff([:a, :b | [:c]] = [:a, :b], "[:a, :b | [-:c-]]", "[:a, :b]")
refute_diff([:a, :b | []] = [:a, :b, :c], "[:a, :b | []]", "[:a, :b, +:c+]")
refute_diff([:a, :b | [:c, :d]] = [:a, :b, :c], "[:a, :b | [:c, -:d-]]", "[:a, :b, :c]")
refute_diff([:a, :b | [:c, :d]] = [:a], "[:a, -:b- | [-:c-, -:d-]]", "[:a]")
refute_diff(
[:a, [:b, :c] | [:d, :e]] = [:a, [:x, :y], :d, :e],
"[:a, [-:b-, -:c-] | [:d, :e]]",
"[:a, [+:x+, +:y+], :d, :e]"
)
refute_diff(
[:a, [:b, :c] | [:d, :e]] = [:a, [:x, :c], :d, :e],
"[:a, [-:b-, :c] | [:d, :e]]",
"[:a, [+:x+, :c], :d, :e]"
)
pins = [list_bc: [:b, :c]]
assert_diff([:a | x] = [:a, :b], [x: [:b]], pins)
assert_diff([:a | x] = [:a, :b, :c], [x: [:b, :c]], pins)
assert_diff([:a | ^list_bc] = [:a, :b, :c], [], pins)
refute_diff([:a | ^list_bc] = [:x, :x, :c], "[-:a- | -^list_bc-]", "[+:x+, +:x+, :c]", pins)
refute_diff([:a | ^list_bc] = [:a, :x, :c], "[:a | -^list_bc-]", "[:a, +:x+, :c]", pins)
end
test "concat lists" do
assert_diff([:a] ++ [:b] = [:a, :b], [])
assert_diff([:a, :b] ++ [] = [:a, :b], [])
assert_diff([] ++ [:a, :b] = [:a, :b], [])
refute_diff([:a, :b] ++ [:c] = [:a, :b], "[:a, :b] ++ [-:c-]", "[:a, :b]")
refute_diff([:a, :c] ++ [:b] = [:a, :b], "[:a, -:c-] ++ [:b]", "[:a, :b]")
refute_diff([:a] ++ [:b] ++ [:c] = [:a, :b], "[:a] ++ [:b] ++ [-:c-]", "[:a, :b]")
assert_diff([:a] ++ :b = [:a | :b], [])
assert_diff([:a] ++ x = [:a, :b], x: [:b])
refute_diff([:a, :b] ++ :c = [:a, :b, :c], "[:a, :b] ++ -:c-", "[:a, :b, +:c+]")
refute_diff([:a] ++ [:b] ++ :c = [:a, :b, :c], "[:a] ++ [:b] ++ -:c-", "[:a, :b, +:c+]")
refute_diff([:a] ++ [:b] = :a, "-[:a] ++ [:b]-", "+:a+")
end
test "mixed lists" do
refute_diff([:a | :b] = [:a, :b], "[:a | -:b-]", "[:a, +:b+]")
refute_diff([:a, :b] = [:a | :b], "[:a, -:b-]", "[:a | +:b+]")
refute_diff([:a | [:b]] = [:a | :b], "[:a | -[:b]-]", "[:a | +:b+]")
refute_diff([:a | [:b | [:c]]] = [:a | :c], "[:a | [-:b- | -[:c]-]]", "[:a | +:c+]")
refute_diff([:a | :b] = [:a, :b, :c], "[:a | -:b-]", "[:a, +:b+, +:c+]")
refute_diff([:a, :b, :c] = [:a | :b], "[:a, -:b-, -:c-]", "[:a | +:b+]")
refute_diff([:a | [:b] ++ [:c]] = [:a, :b], "[:a | [:b] ++ [-:c-]]", "[:a, :b]")
refute_diff(
[:a | [:b] ++ [:c]] ++ [:d | :e] = [:a, :b | :e],
"[:a | [:b] ++ [-:c-]] ++ [-:d- | :e]",
"[:a, :b | :e]"
)
end
test "lists outside of match context" do
refute_diff(
[:a, {:|, [], [:b, :c]}] == [:a, :b | :c],
"[:a, -{:|, [], [:b, :c]}-]",
"[:a, +:b+ | +:c+]"
)
end
test "keyword lists" do
assert_diff([file: "nofile", line: 1] = [file: "nofile", line: 1], [])
refute_diff(
[file: "nofile", line: 1] = [file: nil, lime: 1],
~s/[file: -"nofile"-, -line:- 1]/,
"[file: +nil+, +lime:+ 1]"
)
refute_diff(
[file: nil, line: 1] = [file: "nofile"],
"[file: -nil-, -line: 1-]",
~s/[file: +"nofile"+]/
)
refute_diff(
["foo-bar": 1] = [],
~s/[-"foo-bar": 1-]/,
"[]"
)
refute_diff(
[file: nil] = [{:line, 1}, {1, :foo}],
"[-file:- -nil-]",
"[{+:line+, +1+}, +{1, :foo}+]"
)
end
test "tuples" do
assert_diff({:a, :b} = {:a, :b}, [])
refute_diff({:a, :b} = {:a, :x}, "{:a, -:b-}", "{:a, +:x+}")
refute_diff({:a, :b} = {:x, :x}, "{-:a-, -:b-}", "{+:x+, +:x+}")
refute_diff({:a, :b, :c} = {:a, :b, :x}, "{:a, :b, -:c-}", "{:a, :b, +:x+}")
refute_diff({:a} = {:a, :b}, "{:a}", "{:a, +:b+}")
refute_diff({:a, :b} = {:a}, "{:a, -:b-}", "{:a}")
refute_diff({:ok, value} = {:error, :fatal}, "{-:ok-, value}", "{+:error+, :fatal}")
refute_diff({:a, :b} = :a, "-{:a, :b}-", "+:a+")
end
test "tuples outside of match context" do
assert_diff({:a, :b} == {:a, :b}, [])
refute_diff({:a} == {:a, :b}, "{:a}", "{:a, +:b+}")
refute_diff({:a, :b} == {:a}, "{:a, -:b-}", "{:a}")
refute_diff({:{}, [], [:a]} == {:a}, "{-:{}-, -[]-, -[:a]-}", "{+:a+}")
refute_diff({:{}, [], [:a]} == :a, "-{:{}, [], [:a]}-", "+:a+")
refute_diff({:a, :b, :c} == {:a, :b, :x}, "{:a, :b, -:c-}", "{:a, :b, +:x+}")
end
# Map diffs in match context: every left-hand key must be present on the
# right, but extra right-hand keys are allowed (map patterns match subsets).
test "maps" do
assert_diff(%{a: 1} = %{a: 1}, [])
assert_diff(%{a: 1} = %{a: 1, b: 2}, [])
assert_diff(%{a: 1, b: 2} = %{a: 1, b: 2}, [])
assert_diff(%{b: 2, a: 1} = %{a: 1, b: 2}, [])
assert_diff(%{a: 1, b: 2, c: 3} = %{a: 1, b: 2, c: 3}, [])
assert_diff(%{c: 3, b: 2, a: 1} = %{a: 1, b: 2, c: 3}, [])
refute_diff(%{a: 1, b: 2} = %{a: 1}, "%{a: 1, -b: 2-}", "%{a: 1}")
refute_diff(%{a: 1, b: 2} = %{a: 1, b: 12}, "%{a: 1, b: 2}", "%{a: 1, b: +1+2}")
refute_diff(%{a: 1, b: 2} = %{a: 1, c: 2}, "%{a: 1, -b: 2-}", "%{a: 1, c: 2}")
refute_diff(%{a: 1, b: 2, c: 3} = %{a: 1, b: 12}, "%{a: 1, b: 2, -c: 3-}", "%{a: 1, b: +1+2}")
refute_diff(%{a: 1, b: 2, c: 3} = %{a: 1, c: 2}, "%{a: 1, -b: 2-, c: -3-}", "%{a: 1, c: +2+}")
refute_diff(%{a: 1} = %{a: 2, b: 2, c: 3}, "%{a: -1-}", "%{a: +2+, b: 2, c: 3}")
refute_diff(
%{1 => :a, 2 => :b} = %{1 => :a, 12 => :b},
"%{1 => :a, -2 => :b-}",
"%{1 => :a, 12 => :b}"
)
refute_diff(
%{1 => :a, 2 => :b} = %{1 => :a, :b => 2},
"%{1 => :a, -2 => :b-}",
"%{1 => :a, :b => 2}"
)
# Pinned keys (`^a`) are resolved against the supplied bindings.
pins = [a: :a, b: :b]
assert_diff(%{^a => 1} = %{a: 1}, [], pins)
assert_diff(%{^a => x} = %{a: 1}, [x: 1], pins)
refute_diff(%{^a => 1, :a => 2} = %{a: 1}, "%{^a => 1, -:a => 2-}", "%{a: 1}", pins)
refute_diff(
%{^a => x, ^b => x} = %{a: 1, b: 2},
"%{^a => x, ^b => -x-}",
"%{a: 1, b: +2+}",
pins
)
refute_diff(%{a: 1} = :a, "-%{a: 1}-", "+:a+")
end
# Map diffs under `==`: unlike match context, a key missing on the LEFT is
# also reported (as a +addition+ on the right).
test "maps outside match context" do
assert_diff(%{a: 1} == %{a: 1}, [])
assert_diff(%{a: 1, b: 2} == %{a: 1, b: 2}, [])
assert_diff(%{b: 2, a: 1} == %{a: 1, b: 2}, [])
assert_diff(%{a: 1, b: 2, c: 3} == %{a: 1, b: 2, c: 3}, [])
assert_diff(%{c: 3, b: 2, a: 1} == %{a: 1, b: 2, c: 3}, [])
refute_diff(%{a: 1} == %{a: 1, b: 2}, "%{a: 1}", "%{a: 1, +b: 2+}")
refute_diff(%{a: 1, b: 2} == %{a: 1}, "%{a: 1, -b: 2-}", "%{a: 1}")
refute_diff(%{a: 1, b: 12} == %{a: 1, b: 2}, "%{a: 1, b: -1-2}", "%{a: 1, b: 2}")
refute_diff(%{a: 1, b: 2} == %{a: 1, b: 12}, "%{a: 1, b: 2}", "%{a: 1, b: +1+2}")
refute_diff(%{a: 1, b: 2} == %{a: 1, c: 2}, "%{a: 1, -b: 2-}", "%{a: 1, +c: 2+}")
end
# Struct diffs in match context: a struct pattern matches a plain map with the
# same keys, struct names are diffed like any other field, and unset fields on
# the right (e.g. `name: nil`) are rendered only on the right-hand side.
test "structs" do
  assert_diff(%User{age: 16} = %User{age: 16}, [])
  assert_diff(%User{age: 16} = %{age: 16}, [])
  assert_diff(%{age: 16, __struct__: User} = %User{age: 16}, [])

  refute_diff(
    %User{age: 16} = %User{age: 21},
    "%ExUnit.DiffTest.User{age: 1-6-}",
    "%ExUnit.DiffTest.User{age: +2+1, name: nil}"
  )

  # Mismatched struct modules are diffed on both the module and the fields.
  # (An exact duplicate of this assertion was removed.)
  refute_diff(
    %User{age: 16} = %Person{age: 21},
    "%-ExUnit.DiffTest.User-{age: 1-6-}",
    "%+ExUnit.DiffTest.Person+{age: +2+1}"
  )

  refute_diff(
    %User{age: 16} = %{age: 21},
    "%-ExUnit.DiffTest.User-{age: 1-6-}",
    "%{age: +2+1}"
  )

  refute_diff(
    %{age: 16, __struct__: Person} = %User{age: 16},
    "%-ExUnit.DiffTest.Person-{age: 16}",
    "%+ExUnit.DiffTest.User+{age: 16, name: nil}"
  )

  # Pins inside struct patterns work for values but not for keys.
  pins = [twenty_one: 21]
  assert_diff(%User{age: ^twenty_one} = %User{age: 21}, [], pins)
  assert_diff(%User{age: age} = %User{age: 21}, [age: 21], pins)

  refute_diff(
    %User{^twenty_one => 21} = %User{age: 21},
    "%ExUnit.DiffTest.User{-^twenty_one => 21-}",
    "%ExUnit.DiffTest.User{age: 21, name: nil}",
    pins
  )

  refute_diff(%User{age: 21} = :a, "-%ExUnit.DiffTest.User{age: 21}-", "+:a+", pins)
end
# Struct diffs under `==`: equality is strict, so a bare map never equals a
# struct and implicit `nil` fields participate in the diff.
test "structs outside of match context" do
assert_diff(%User{age: 16} == %User{age: 16}, [])
assert_diff(%{age: 16, __struct__: User, name: nil} == %User{age: 16}, [])
refute_diff(
%User{age: 16} == %{age: 16},
"%-ExUnit.DiffTest.User-{age: 16, -name: nil-}",
"%{age: 16}"
)
refute_diff(
%User{age: 16} == %User{age: 21},
"%ExUnit.DiffTest.User{age: 1-6-, name: nil}",
"%ExUnit.DiffTest.User{age: +2+1, name: nil}"
)
refute_diff(
%User{age: 16} == %Person{age: 21},
"%-ExUnit.DiffTest.User-{age: 1-6-, -name: nil-}",
"%+ExUnit.DiffTest.Person+{age: +2+1}"
)
end
# Structs with a custom Inspect implementation (e.g. Date) are diffed on their
# inspected representation; in match context the left side falls back to a
# map-ish rendering (note the quotes in the expectation).
test "structs with inspect" do
refute_diff(
~D[2017-10-01] = ~D[2017-10-02],
~s/-~D"2017-10-01"-/,
"~D[2017-10-0+2+]"
)
end
test "structs with inspect outside match context" do
refute_diff(
~D[2017-10-01] == ~D[2017-10-02],
"~D[2017-10-0-1-]",
"~D[2017-10-0+2+]"
)
end
# When the inspected output of two different structs is identical, the diff is
# computed on the underlying fields instead.
test "structs without inspect difference" do
refute_diff(
%Opaque{data: 1} == %Opaque{data: 2},
"%ExUnit.DiffTest.Opaque{data: -1-}",
"%ExUnit.DiffTest.Opaque{data: +2+}"
)
end
# String diffs use a character-level myers diff, except when the strings are
# too dissimilar, in which case the whole value is marked changed.
test "strings" do
assert_diff("" = "", [])
assert_diff("fox hops over the dog" = "fox hops over the dog", [])
refute_diff("fox" = "foo", "fo-x-", "fo+o+")
refute_diff(
"fox hops over \"the dog" = "fox jumps over the lazy cat",
~s/"fox -ho-ps over -\\\"-the -dog-"/,
~s/"fox + jum+ps over the + lazy cat+"/
)
refute_diff(
"short" = "really long string that should not emit diff against short",
~s/"-short-"/,
~s/"+really long string that should not emit diff against short+"/
)
refute_diff("foo" = :a, ~s/-"foo"-/, "+:a+")
end
# `<>` patterns: literal prefixes are matched segment by segment and a
# trailing variable (or pin) captures the rest of the string.
test "concat operator" do
assert_diff("fox hops" <> " over the dog" = "fox hops over the dog", [])
assert_diff("fox hops " <> "over " <> "the dog" = "fox hops over the dog", [])
refute_diff(
"fox hops" <> " under the dog" = "fox hops over the dog",
~s/"fox hops" <> " -und-er the dog"/,
~s/"fox hops +ov+er the dog"/
)
refute_diff(
"fox hops over" <> " the dog" = "fox hops over",
~s/"fox hops over" <> "- the dog-"/,
~s/"fox hops over"/
)
refute_diff(
"fox hops" <> " over the dog" = "fox",
~s/"-fox hops-" <> "- over the dog-"/,
~s/"+fox+"/
)
refute_diff(
"fox" <> " hops" = "fox h",
~s/"fox" <> " h-ops-"/,
~s/"fox h"/
)
refute_diff(
"fox hops " <> "hover " <> "the dog" = "fox hops over the dog",
~s/"fox hops " <> "-h-over " <> "the dog"/,
~s/"fox hops over the dog"/
)
pins = [x: " over the dog"]
assert_diff("fox hops" <> x = "fox hops over the dog", x: " over the dog")
assert_diff("fox hops " <> "over " <> x = "fox hops over the dog", x: "the dog")
assert_diff("fox hops" <> ^x = "fox hops over the dog", [], pins)
refute_diff(
"fox hops " <> "hover " <> x = "fox hops over the dog",
~s/"fox hops " <> "-h-over " <> x/,
~s/"fox hops over +t+he dog"/
)
refute_diff(
"fox hops " <> "hover " <> ^x = "fox hops over the dog",
~s/"fox hops " <> "-h-over " <> -^x-/,
~s/"fox hops over +t+he dog"/,
pins
)
refute_diff("fox" <> " hops" = :a, ~s/-"fox" <> " hops"-/, "+:a+")
end
# `_` matches anything and never contributes to the diff.
test "underscore" do
assert_diff(_ = :a, [])
assert_diff({_, _} = {:a, :b}, [])
refute_diff({_, :a} = {:b, :b}, "{_, -:a-}", "{:b, +:b+}")
end
# Macros in patterns: when they fail, the whole macro call is marked changed
# since the diff cannot look inside the expansion.
test "macros" do
assert_diff(one() = 1, [])
assert_diff(tuple(x, x) = {1, 1}, x: 1)
refute_diff(one() = 2, "-one()-", "+2+")
refute_diff(tuple(x, x) = {1, 2}, "-tuple(x, x)-", "{1, +2+}")
pins = [x: 1]
assert_diff(pin_x() = 1, [], pins)
refute_diff(pin_x() = 2, "-pin_x()-", "+2+", pins)
end
# Guards: only the failing guard expressions are marked, across `and`/`or`
# and multiple `when` clauses.
test "guards" do
assert_diff((x when x == 0) = 0, x: 0)
assert_diff((x when x == 0 and is_integer(x)) = 0, x: 0)
assert_diff((x when x == 0 or x == 1) = 0, x: 0)
assert_diff((x when x == 0 when x == 1) = 0, x: 0)
assert_diff((x when one() == 1) = 0, x: 0)
refute_diff((x when x == 1) = 0, "x when -x == 1-", "0")
refute_diff((x when x == 0 and x == 1) = 0, "x when x == 0 and -x == 1-", "0")
refute_diff((x when x == 1 and x == 2) = 0, "x when -x == 1- and -x == 2-", "0")
refute_diff((x when x == 1 or x == 2) = 0, "x when -x == 1- or -x == 2-", "0")
refute_diff((x when x == 1 when x == 2) = 0, "x when -x == 1- when -x == 2-", "0")
refute_diff((x when x in [1, 2]) = 0, "x when -x in [1, 2]-", "0")
end
# Charlists are diffed like strings, element by element.
test "charlists" do
refute_diff(
'fox hops over \'the dog' = 'fox jumps over the lazy cat',
"'fox -ho-ps over -\\'-the -dog-'",
"'fox +jum+ps over the +lazy cat+'"
)
end
# References have no literal syntax, so the expectations are interpolated
# from `inspect/1` output.
test "refs" do
ref1 = make_ref()
ref2 = make_ref()
inspect_ref1 = inspect(ref1)
inspect_ref2 = inspect(ref2)
assert_diff(ref1 == ref1, [])
assert_diff({ref1, ref2} == {ref1, ref2}, [])
refute_diff(ref1 == ref2, "-#{inspect_ref1}-", "+#{inspect_ref2}+")
refute_diff(
{ref1, ref2} == {ref2, ref1},
"{-#{inspect_ref1}-, -#{inspect_ref2}-}",
"{+#{inspect_ref2}+, +#{inspect_ref1}+}"
)
refute_diff(
{ref1, ref2} == ref1,
"-{#{inspect_ref1}, #{inspect_ref2}}-",
"+#{inspect_ref1}+"
)
refute_diff(
ref1 == {ref1, ref2},
"-#{inspect_ref1}-",
"+{#{inspect_ref1}, #{inspect_ref2}}+"
)
refute_diff(ref1 == :a, "-#{inspect_ref1}-", "+:a+")
# NOTE(review): several expectations below omit the trailing marker (e.g.
# "-{...}" with no closing "-"); they still pass because refute_diff uses
# `=~` (substring match) — confirm whether the markers were meant to close.
refute_diff({ref1, ref2} == :a, "-{#{inspect_ref1}, #{inspect_ref2}}", "+:a+")
refute_diff(%{ref1 => ref2} == :a, "-%{#{inspect_ref1} => #{inspect_ref2}}", "+:a+")
refute_diff(
%Opaque{data: ref1} == :a,
"-%ExUnit.DiffTest.Opaque{data: #{inspect_ref1}}",
"+:a+"
)
end
# Pids, like refs, are compared by identity and rendered via inspect/1.
test "pids" do
pid = self()
inspect_pid = inspect(pid)
assert_diff(pid == pid, [])
assert_diff({pid, pid} == {pid, pid}, [])
refute_diff(pid == :a, "-#{inspect_pid}-", "+:a+")
refute_diff({pid, pid} == :a, "-{#{inspect_pid}, #{inspect_pid}}", "+:a+")
refute_diff({pid, :a} == {:a, pid}, "{-#{inspect_pid}-, -:a-}", "{+:a+, +#{inspect_pid}+}")
refute_diff(%{pid => pid} == :a, "-%{#{inspect_pid} => #{inspect_pid}}", "+:a+")
refute_diff(
%Opaque{data: pid} == :a,
"-%ExUnit.DiffTest.Opaque{data: #{inspect_pid}}",
"+:a+"
)
end
# Anonymous functions are only equal to themselves.
test "functions" do
identity = & &1
inspect = inspect(identity)
assert_diff(identity == identity, [])
assert_diff({identity, identity} == {identity, identity}, [])
refute_diff(identity == :a, "-#{inspect}-", "+:a+")
refute_diff({identity, identity} == :a, "-{#{inspect}, #{inspect}}", "+:a+")
refute_diff({identity, :a} == {:a, identity}, "{-#{inspect}-, -:a-}", "{+:a+, +#{inspect}+}")
refute_diff(%{identity => identity} == :a, "-%{#{inspect} => #{inspect}}", "+:a+")
refute_diff(
%Opaque{data: identity} == :a,
"-%ExUnit.DiffTest.Opaque{data: #{inspect}}",
"+:a+"
)
end
# Bitstring patterns are not diffed structurally: both sides are marked
# changed wholesale.
test "not supported" do
refute_diff(
<<147, 1, 2, 31>> = <<193, 1, 31>>,
"-<<147, 1, 2, 31>>-",
"+<<193, 1, 31>>+"
)
end
# Asserts that `left` and `right` are NOT equivalent and that the rendered
# diff of each side matches the given expectation (via `=~`, so substrings
# or regexes both work). `context` selects match vs. equality semantics.
defp refute_diff(left, right, expected_left, expected_right, context) do
  {diff, _env} = Diff.compute(left, right, context)
  assert diff.equivalent? == false

  assert format_diff_side(diff.left, "-") =~ expected_left
  assert format_diff_side(diff.right, "+") =~ expected_right
end

# Renders one side of a diff to a binary, wrapping changed segments in
# `marker` ("-" for deletions, "+" for additions).
defp format_diff_side(side, marker) do
  side
  |> Diff.to_algebra(&diff_wrapper(&1, marker))
  |> Algebra.format(:infinity)
  |> IO.iodata_to_binary()
end
# Asserts that `left` and `right` ARE equivalent under `context` and that the
# bindings produced by the match equal `expected_binding`.
defp assert_diff(left, right, expected_binding, context) do
  {diff, env} = Diff.compute(left, right, context)
  bindings = Enum.map(env.current_vars, fn {{name, _context}, value} -> {name, value} end)

  assert diff.equivalent? == true
  assert bindings == expected_binding
end
# Surrounds an algebra document with `side` markers, e.g. "-doc-" or "+doc+".
defp diff_wrapper(doc, side), do: Algebra.concat([side, doc, side])
end
| 30.727617 | 98 | 0.430295 |
ffafb91e63a73a28d4288eb6613908e6b8139eef | 1,091 | exs | Elixir | test/farmbot_ext/api/reconciler_test.exs | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | test/farmbot_ext/api/reconciler_test.exs | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | test/farmbot_ext/api/reconciler_test.exs | bahanni/custom_rpi4 | ddefa85d30bacaae40151a63a9a0ebbf4ad30ed5 | [
"MIT"
] | null | null | null | defmodule FarmbotOS.API.ReconcilerTest do
require Helpers
use ExUnit.Case
use Mimic
alias FarmbotOS.API.Reconciler
alias FarmbotOS.API
setup :verify_on_exit!
@fake_sync %Ecto.Changeset{
action: nil,
changes: %{
devices: [],
farmware_envs: [],
fbos_configs: [],
firmware_configs: [],
first_party_farmwares: [],
peripherals: [],
point_groups: [],
points: [],
regimens: [],
sensors: [],
sequences: [],
tools: []
},
errors: [],
data: %FarmbotOS.Asset.Sync{},
valid?: true
}
test "sync/0" do
FarmbotOS.API
nope = fn -> [] end
expect(FarmbotOS.API.SyncGroup, :group_0, 1, nope)
expect(FarmbotOS.API.SyncGroup, :group_1, 1, nope)
expect(FarmbotOS.API.SyncGroup, :group_2, 1, nope)
expect(FarmbotOS.API.SyncGroup, :group_3, 1, nope)
expect(FarmbotOS.API.SyncGroup, :group_4, 1, nope)
expect(API, :get_changeset, 1, fn mod ->
assert mod == FarmbotOS.Asset.Sync
{:ok, @fake_sync}
end)
assert :ok == Reconciler.sync()
end
end
| 21.392157 | 54 | 0.605866 |
ffafc221bc59847e935c944ab5cf9ffbce45eb0d | 3,952 | exs | Elixir | test/liveview_todos_web/live/todo_live_test.exs | mwindholtz/liveview_todos | 1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d | [
"Apache-2.0"
] | null | null | null | test/liveview_todos_web/live/todo_live_test.exs | mwindholtz/liveview_todos | 1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d | [
"Apache-2.0"
] | 2 | 2021-03-09T14:03:26.000Z | 2021-05-10T06:02:53.000Z | test/liveview_todos_web/live/todo_live_test.exs | mwindholtz/liveview_todos | 1f6a2e576be4a41f49d0bfe2b01da97d268d0d0d | [
"Apache-2.0"
] | null | null | null | defmodule LiveviewTodosWeb.TodoLiveTest do
use LiveviewTodosWeb.ConnCase
alias LiveviewTodos.DomainEvent
alias LiveviewTodosWeb.TodoLive
alias Phoenix.LiveView
alias Phoenix.LiveView.Socket
import ExUnit.CaptureLog
defmodule TodoApplicationServiceStub do
def create_list(name, observer_pid) when is_pid(observer_pid) do
send(self(), {:create_list, name})
{:ok, %LiveviewTodos.List{}}
end
def accept(%DomainEvent{} = event) do
send(self(), {event.name, event})
:ok
end
end
defmodule CommandStub do
def refresh_lists(%Socket{} = socket) do
send(self(), {:refresh_lists, socket})
:ok
end
end
def socket_with_stub do
%Socket{}
|> LiveView.assign(:todo_application_service, TodoApplicationServiceStub)
|> LiveView.assign(:command, CommandStub)
end
describe "TodoLive.handle_event" do
setup do
list_id = 1
:ok = LiveviewTodos.TargetedTopic.subscribe(list_id)
%{list_id: list_id}
end
test "create-list" do
name = "Home stuff"
attrs = %{"name" => name}
{:noreply, _mod_socket} =
TodoLive.handle_event("create-list", %{"list" => attrs}, socket_with_stub())
assert_receive {:create_list, name}
end
test "delete-list", %{list_id: list_id} do
attrs = %{"list-id" => "#{list_id}"}
{:noreply, _mod_socket} = TodoLive.handle_event("delete-list", attrs, socket_with_stub())
assert_receive %LiveviewTodos.DomainEvent{
attrs: %{list_id: ^list_id},
name: :delete_list_requested
}
end
test "add-item", %{list_id: list_id} do
item = %{"description" => "Buy milk and eggs", "list_id" => "#{list_id}"}
{:noreply, _mod_socket} =
TodoLive.handle_event("add-item", %{"item" => item}, socket_with_stub())
assert_receive %LiveviewTodos.DomainEvent{
attrs: %{description: "Buy milk and eggs", list_id: 1},
name: :create_item_requested
}
end
test "toggle_done", %{list_id: list_id} do
{:noreply, _mod_socket} =
TodoLive.handle_event(
"toggle_done",
%{"list-id" => "#{list_id}", "item-title" => "title"},
socket_with_stub()
)
assert_receive %LiveviewTodos.DomainEvent{
attrs: %{item_title: "title", list_id: ^list_id},
name: :toggle_item_requested
}
end
end
describe "TodoLive.handle_info" do
test "UNHANDED" do
expected_log_message = "UNHANDED PUBSUB TUPLE: {:unexpected, 99}"
# When
log =
capture_log(fn ->
TodoLive.handle_info({:unexpected, 99}, socket_with_stub())
end)
assert log =~ expected_log_message
end
test "list_create so refresh_lists" do
# When
{:noreply, _mod_socket} =
TodoLive.handle_info(
%DomainEvent{name: :list_created, attrs: %{list_id: 99}},
socket_with_stub()
)
assert_receive {:refresh_lists, _socket}
end
test "todo_created so refresh_lists" do
# When
{:noreply, _mod_socket} =
TodoLive.handle_info(
%DomainEvent{name: :todo_created, attrs: %{list_id: 99, title: "Task Name"}},
socket_with_stub()
)
assert_receive {:refresh_lists, _socket}
end
test "list_item_toggled so refresh_lists" do
# When
{:noreply, _mod_socket} =
TodoLive.handle_info(
%DomainEvent{name: :list_item_toggled, attrs: %{list_id: 99}},
socket_with_stub()
)
assert_receive {:refresh_lists, _socket}
end
test "list_deleted so refresh_lists" do
# When
{:noreply, _mod_socket} =
TodoLive.handle_info(
%DomainEvent{name: :list_deleted, attrs: %{list_id: 99}},
socket_with_stub()
)
assert_receive {:refresh_lists, _socket}
end
end
# WIP, needs test for LiveviewTodosWeb.TodoLive.Command
end
| 26.884354 | 95 | 0.623482 |
ffafd0b4927664a7d2523b355438a6efda9772ac | 3,079 | ex | Elixir | lib/receipt_verifier/receipt/iap_receipt.ex | portal-labs/receipt_verifier | d8d2d0e6382874cfc35daa27148be39a2fb23aeb | [
"MIT"
] | null | null | null | lib/receipt_verifier/receipt/iap_receipt.ex | portal-labs/receipt_verifier | d8d2d0e6382874cfc35daa27148be39a2fb23aeb | [
"MIT"
] | null | null | null | lib/receipt_verifier/receipt/iap_receipt.ex | portal-labs/receipt_verifier | d8d2d0e6382874cfc35daa27148be39a2fb23aeb | [
"MIT"
] | null | null | null | defmodule ReceiptVerifier.IAPReceipt do
@moduledoc """
The struct represent an In-App Purchase Receipt
"""
@type t :: %__MODULE__{
web_order_line_item_id: String.t(),
transaction_id: String.t(),
quantity: integer,
purchase_date: DateTime.t(),
product_id: String.t(),
original_transaction_id: String.t(),
original_purchase_date: DateTime.t(),
is_trial_period: boolean(),
is_in_intro_offer_period: boolean(),
expires_date: DateTime.t(),
cancellation_date: DateTime.t(),
cancellation_reason: String.t(),
subscription_group_identifier: String.t(),
in_app_ownership_type: String.t(),
promotional_offer_id: String.t(),
offer_code_ref_name: String.t()
}
defstruct [
:web_order_line_item_id,
:transaction_id,
:quantity,
:purchase_date,
:product_id,
:original_transaction_id,
:original_purchase_date,
:is_trial_period,
:is_in_intro_offer_period,
:expires_date,
:cancellation_date,
:cancellation_reason,
:subscription_group_identifier,
:in_app_ownership_type,
:promotional_offer_id,
:offer_code_ref_name
]
@doc false
@spec parse(map) :: t
def parse(data) when is_map(data) do
attrs =
data
|> Enum.map(&do_parse_field/1)
struct(__MODULE__, attrs)
end
defp do_parse_field({"purchase_date_ms", value}) do
{:purchase_date, format_datetime(value)}
end
defp do_parse_field({"purchase_date", _value}) do
{:skip, nil}
end
defp do_parse_field({"purchase_date_pst", _value}) do
{:skip, nil}
end
defp do_parse_field({"original_purchase_date_ms", value}) do
{:original_purchase_date, format_datetime(value)}
end
defp do_parse_field({"original_purchase_date", _value}) do
{:skip, nil}
end
defp do_parse_field({"original_purchase_date_pst", _value}) do
{:skip, nil}
end
defp do_parse_field({"is_trial_period", value}) do
# In elixir, true is :true
{:is_trial_period, String.to_atom(value)}
end
defp do_parse_field({"is_in_intro_offer_period", value}) do
{:is_in_intro_offer_period, String.to_atom(value)}
end
defp do_parse_field({"quantity", value}) do
{:quantity, String.to_integer(value)}
end
defp do_parse_field({"expires_date_ms", value}) do
{:expires_date, format_datetime(value)}
end
defp do_parse_field({"expires_date", _value}) do
{:skip, nil}
end
defp do_parse_field({"expires_date_pst", _value}) do
{:skip, nil}
end
defp do_parse_field({"cancellation_date_ms", value}) do
{:cancellation_date, format_datetime(value)}
end
defp do_parse_field({"cancellation_date", _value}) do
{:skip, nil}
end
defp do_parse_field({"cancellation_date_pst", _value}) do
{:skip, nil}
end
defp do_parse_field({field, value}) do
{String.to_atom(field), value}
end
defp format_datetime(datetime) do
datetime
|> String.to_integer()
|> DateTime.from_unix!(:millisecond)
end
end
| 24.632 | 64 | 0.671971 |
ffb009cb7b9eb489df8e95c7223424ca66cb4303 | 12,795 | ex | Elixir | lib/redix/connection.ex | SoCal-Software-Labs/safe-redix | 6ac3d42c104ee3a2bcd5d726aaca1474e95cc29f | [
"MIT"
] | 968 | 2015-08-17T14:14:57.000Z | 2022-03-29T03:39:17.000Z | lib/redix/connection.ex | SoCal-Software-Labs/safe-redix | 6ac3d42c104ee3a2bcd5d726aaca1474e95cc29f | [
"MIT"
] | 192 | 2015-08-17T20:39:57.000Z | 2022-03-23T08:48:36.000Z | lib/redix/connection.ex | SoCal-Software-Labs/safe-redix | 6ac3d42c104ee3a2bcd5d726aaca1474e95cc29f | [
"MIT"
] | 145 | 2015-08-17T20:38:22.000Z | 2022-03-04T22:59:47.000Z | defmodule Redix.Connection do
@moduledoc false
alias Redix.{ConnectionError, Format, Protocol, SocketOwner, StartOptions}
@behaviour :gen_statem
defstruct [
:opts,
:transport,
:socket_owner,
:table,
:socket,
:backoff_current,
:connected_address,
counter: 0,
client_reply: :on
]
@backoff_exponent 1.5
## Public API
def start_link(opts) when is_list(opts) do
opts = StartOptions.sanitize(opts)
{gen_statem_opts, opts} = Keyword.split(opts, [:hibernate_after, :debug, :spawn_opt])
case Keyword.fetch(opts, :name) do
:error ->
:gen_statem.start_link(__MODULE__, opts, gen_statem_opts)
{:ok, atom} when is_atom(atom) ->
:gen_statem.start_link({:local, atom}, __MODULE__, opts, gen_statem_opts)
{:ok, {:global, _term} = tuple} ->
:gen_statem.start_link(tuple, __MODULE__, opts, gen_statem_opts)
{:ok, {:via, via_module, _term} = tuple} when is_atom(via_module) ->
:gen_statem.start_link(tuple, __MODULE__, opts, gen_statem_opts)
{:ok, other} ->
raise ArgumentError, """
expected :name option to be one of the following:
* nil
* atom
* {:global, term}
* {:via, module, term}
Got: #{inspect(other)}
"""
end
end
def stop(conn, timeout) do
:gen_statem.stop(conn, :normal, timeout)
end
def pipeline(conn, commands, timeout, telemetry_metadata) do
conn_pid = GenServer.whereis(conn)
request_id = Process.monitor(conn_pid)
telemetry_metadata = telemetry_pipeline_metadata(conn, conn_pid, commands, telemetry_metadata)
start_time = System.monotonic_time()
:ok = execute_telemetry_pipeline_start(telemetry_metadata)
# We cast to the connection process knowing that it will reply at some point,
# either after roughly timeout or when a response is ready.
cast = {:pipeline, commands, _from = {self(), request_id}, timeout}
:ok = :gen_statem.cast(conn_pid, cast)
receive do
{^request_id, resp} ->
_ = Process.demonitor(request_id, [:flush])
:ok = execute_telemetry_pipeline_stop(telemetry_metadata, start_time, resp)
resp
{:DOWN, ^request_id, _, _, reason} ->
exit(reason)
end
end
defp telemetry_pipeline_metadata(conn, conn_pid, commands, telemetry_metadata) do
name =
if is_pid(conn) do
nil
else
conn
end
%{
connection: conn_pid,
connection_name: name,
commands: commands,
extra_metadata: telemetry_metadata
}
end
defp execute_telemetry_pipeline_start(metadata) do
measurements = %{system_time: System.system_time()}
:ok = :telemetry.execute([:redix, :pipeline, :start], measurements, metadata)
end
defp execute_telemetry_pipeline_stop(metadata, start_time, response) do
measurements = %{duration: System.monotonic_time() - start_time}
metadata =
case response do
{:ok, _response} -> metadata
{:error, reason} -> Map.merge(metadata, %{kind: :error, reason: reason})
end
:ok = :telemetry.execute([:redix, :pipeline, :stop], measurements, metadata)
end
## Callbacks
## Init callbacks
@impl true
def callback_mode(), do: :state_functions
@impl true
def init(opts) do
transport = if(opts[:ssl], do: :ssl, else: :gen_tcp)
queue_table = :ets.new(:queue, [:ordered_set, :public])
{:ok, socket_owner} = SocketOwner.start_link(self(), opts, queue_table)
data = %__MODULE__{
opts: opts,
table: queue_table,
socket_owner: socket_owner,
transport: transport
}
if opts[:sync_connect] do
# We don't need to handle a timeout here because we're using a timeout in
# connect/3 down the pipe.
receive do
{:connected, ^socket_owner, socket, address} ->
:telemetry.execute([:redix, :connection], %{}, %{
connection: self(),
connection_name: data.opts[:name],
address: address
})
{:ok, :connected, %__MODULE__{data | socket: socket, connected_address: address}}
{:stopped, ^socket_owner, reason} ->
{:stop, %Redix.ConnectionError{reason: reason}}
end
else
{:ok, :connecting, data}
end
end
@impl true
def terminate(reason, _state, data) do
if Process.alive?(data.socket_owner) and reason == :normal do
:ok = SocketOwner.normal_stop(data.socket_owner)
end
end
## State functions
# "Disconnected" state: the connection is down and the socket owner is not alive.
# We want to connect/reconnect. We start the socket owner process and then go in the :connecting
# state.
def disconnected({:timeout, :reconnect}, _timer_info, %__MODULE__{} = data) do
{:ok, socket_owner} = SocketOwner.start_link(self(), data.opts, data.table)
new_data = %{data | socket_owner: socket_owner}
{:next_state, :connecting, new_data}
end
def disconnected({:timeout, {:client_timed_out, _counter}}, _from, _data) do
:keep_state_and_data
end
def disconnected(:internal, {:notify_of_disconnection, _reason}, %__MODULE__{table: table}) do
fun = fn {_counter, from, _ncommands, timed_out?}, _acc ->
if not timed_out?, do: reply(from, {:error, %ConnectionError{reason: :disconnected}})
end
:ets.foldl(fun, nil, table)
:ets.delete_all_objects(table)
:keep_state_and_data
end
def disconnected(:cast, {:pipeline, _commands, from, _timeout}, _data) do
reply(from, {:error, %ConnectionError{reason: :closed}})
:keep_state_and_data
end
# This happens when there's a send error. We close the socket right away, but we wait for
# the socket owner to die so that it can finish processing the data it's processing. When it's
# dead, we go ahead and notify the remaining clients, setup backoff, and so on.
def disconnected(:info, {:stopped, owner, reason}, %__MODULE__{socket_owner: owner} = data) do
:telemetry.execute([:redix, :disconnection], %{}, %{
connection: self(),
connection_name: data.opts[:name],
address: data.connected_address,
reason: %ConnectionError{reason: reason}
})
data = %{data | connected_address: nil}
disconnect(data, reason)
end
def connecting(
:info,
{:connected, owner, socket, address},
%__MODULE__{socket_owner: owner} = data
) do
:telemetry.execute([:redix, :connection], %{}, %{
connection: self(),
connection_name: data.opts[:name],
address: address,
reconnection: not is_nil(data.backoff_current)
})
data = %{data | socket: socket, backoff_current: nil, connected_address: address}
{:next_state, :connected, %{data | socket: socket}}
end
def connecting(:cast, {:pipeline, _commands, _from, _timeout}, _data) do
{:keep_state_and_data, :postpone}
end
def connecting(:info, {:stopped, owner, reason}, %__MODULE__{socket_owner: owner} = data) do
# We log this when the socket owner stopped while connecting.
:telemetry.execute([:redix, :failed_connection], %{}, %{
connection: self(),
connection_name: data.opts[:name],
address: format_address(data),
reason: %ConnectionError{reason: reason}
})
disconnect(data, reason)
end
def connecting({:timeout, {:client_timed_out, _counter}}, _from, _data) do
:keep_state_and_data
end
def connected(:cast, {:pipeline, commands, from, timeout}, data) do
{ncommands, data} = get_client_reply(data, commands)
if ncommands > 0 do
{counter, data} = get_and_update_in(data.counter, &{&1, &1 + 1})
row = {counter, from, ncommands, _timed_out? = false}
:ets.insert(data.table, row)
case data.transport.send(data.socket, Enum.map(commands, &Protocol.pack/1)) do
:ok ->
actions =
case timeout do
:infinity -> []
_other -> [{{:timeout, {:client_timed_out, counter}}, timeout, from}]
end
{:keep_state, data, actions}
{:error, _reason} ->
# The socket owner will get a closed message at some point, so we just move to the
# disconnected state.
:ok = data.transport.close(data.socket)
{:next_state, :disconnected, data}
end
else
reply(from, {:ok, []})
{:keep_state, data}
end
end
def connected(:info, {:stopped, owner, reason}, %__MODULE__{socket_owner: owner} = data) do
:telemetry.execute([:redix, :disconnection], %{}, %{
connection: self(),
connection_name: data.opts[:name],
address: data.connected_address,
reason: %ConnectionError{reason: reason}
})
data = %{data | connected_address: nil}
disconnect(data, reason)
end
def connected({:timeout, {:client_timed_out, counter}}, from, %__MODULE__{} = data) do
if _found? = :ets.update_element(data.table, counter, {4, _timed_out? = true}) do
reply(from, {:error, %ConnectionError{reason: :timeout}})
end
:keep_state_and_data
end
## Helpers
defp reply({pid, request_id} = _from, reply) do
send(pid, {request_id, reply})
end
defp disconnect(_data, %Redix.Error{} = error) do
{:stop, error}
end
defp disconnect(data, reason) do
if data.opts[:exit_on_disconnection] do
{:stop, %ConnectionError{reason: reason}}
else
{backoff, data} = next_backoff(data)
actions = [
{:next_event, :internal, {:notify_of_disconnection, reason}},
{{:timeout, :reconnect}, backoff, nil}
]
{:next_state, :disconnected, data, actions}
end
end
defp next_backoff(%__MODULE__{backoff_current: nil} = data) do
backoff_initial = data.opts[:backoff_initial]
{backoff_initial, %{data | backoff_current: backoff_initial}}
end
defp next_backoff(data) do
next_exponential_backoff = round(data.backoff_current * @backoff_exponent)
backoff_current =
if data.opts[:backoff_max] == :infinity do
next_exponential_backoff
else
min(next_exponential_backoff, Keyword.fetch!(data.opts, :backoff_max))
end
{backoff_current, %{data | backoff_current: backoff_current}}
end
defp get_client_reply(data, commands) do
{ncommands, client_reply} = get_client_reply(commands, _ncommands = 0, data.client_reply)
{ncommands, put_in(data.client_reply, client_reply)}
end
defp get_client_reply([], ncommands, client_reply) do
{ncommands, client_reply}
end
defp get_client_reply([command | rest], ncommands, client_reply) do
case parse_client_reply(command) do
:off -> get_client_reply(rest, ncommands, :off)
:skip when client_reply == :off -> get_client_reply(rest, ncommands, :off)
:skip -> get_client_reply(rest, ncommands, :skip)
:on -> get_client_reply(rest, ncommands + 1, :on)
nil when client_reply == :on -> get_client_reply(rest, ncommands + 1, client_reply)
nil when client_reply == :off -> get_client_reply(rest, ncommands, client_reply)
nil when client_reply == :skip -> get_client_reply(rest, ncommands, :on)
end
end
defp parse_client_reply(["CLIENT", "REPLY", "ON"]), do: :on
defp parse_client_reply(["CLIENT", "REPLY", "OFF"]), do: :off
defp parse_client_reply(["CLIENT", "REPLY", "SKIP"]), do: :skip
defp parse_client_reply(["client", "reply", "on"]), do: :on
defp parse_client_reply(["client", "reply", "off"]), do: :off
defp parse_client_reply(["client", "reply", "skip"]), do: :skip
defp parse_client_reply([part1, part2, part3])
when is_binary(part1) and byte_size(part1) == byte_size("CLIENT") and is_binary(part2) and
byte_size(part2) == byte_size("REPLY") and
is_binary(part3) and
byte_size(part3) in [byte_size("ON"), byte_size("OFF"), byte_size("SKIP")] do
# We need to do this in a "lazy" way: upcase the first string and check, then the second
# one, and then the third one. Before, we were upcasing all three parts first and then
# checking for a CLIENT REPLY * command. That meant that sometimes we would upcase huge
# but completely unrelated commands causing big memory and CPU spikes. See
# https://github.com/whatyouhide/redix/issues/177. "if" works here because and/2
# short-circuits.
if String.upcase(part1) == "CLIENT" and String.upcase(part2) == "REPLY" do
case String.upcase(part3) do
"ON" -> :on
"OFF" -> :off
"SKIP" -> :skip
_other -> nil
end
else
nil
end
end
defp parse_client_reply(_other), do: nil
defp format_address(%{opts: opts} = _state) do
if opts[:sentinel] do
"sentinel"
else
Format.format_host_and_port(opts[:host], opts[:port])
end
end
end
| 31.670792 | 98 | 0.651739 |
ffb03674528f148d56d0fc96cb60457181fe8656 | 892 | exs | Elixir | deps/connection/mix.exs | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | 1 | 2019-11-11T21:48:20.000Z | 2019-11-11T21:48:20.000Z | deps/connection/mix.exs | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | [
"MIT"
] | 4 | 2021-03-04T13:00:52.000Z | 2021-03-12T12:42:09.000Z | deps/connection/mix.exs | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | null | null | null | defmodule Connection.Mixfile do
use Mix.Project
@version "1.1.0"
def project do
[
app: :connection,
version: @version,
elixir: "~> 1.7",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: description(),
package: package(),
docs: docs(),
deps: deps()
]
end
def application do
[
applications: []
]
end
defp deps() do
[
{:ex_doc, "~> 0.22", only: :dev}
]
end
defp docs do
[
source_url: "https://github.com/elixir-ecto/connection",
source_ref: "v#{@version}",
main: Connection
]
end
defp description do
"""
Connection behaviour for connection processes
"""
end
defp package do
%{
licenses: ["Apache 2.0"],
links: %{"Github" => "https://github.com/elixir-ecto/connection"}
}
end
end
| 16.830189 | 71 | 0.55157 |
ffb0493c24555f2bcfbfaab6717c534d33619c4f | 968 | exs | Elixir | config/test.exs | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 13 | 2021-03-06T12:16:34.000Z | 2022-03-31T09:46:35.000Z | config/test.exs | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 148 | 2021-03-05T12:44:55.000Z | 2022-03-11T12:09:06.000Z | config/test.exs | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 2 | 2021-06-02T14:31:21.000Z | 2022-02-14T08:36:51.000Z | import Config
config :hierbautberlin, :environment, :test
# Only in tests, remove the complexity from the password hashing algorithm
config :bcrypt_elixir, :log_rounds, 1
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :hierbautberlin, Hierbautberlin.Repo,
username: "postgres",
password: "postgres",
database: "hierbautberlin_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :hierbautberlin, HierbautberlinWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
config :hierbautberlin, HierbautberlinWeb.Mailer, adapter: Bamboo.TestAdapter
config :bamboo, :refute_timeout, 5
| 31.225806 | 77 | 0.774793 |
ffb05f27cb78ea1291a27320dd289d5d18a883a9 | 2,002 | ex | Elixir | lib/baiji_generator/spec/writer.ex | wrren/baiji_generator.ex | 7c36c0089d6b88576fa6e064e86c342f03729003 | [
"MIT"
] | null | null | null | lib/baiji_generator/spec/writer.ex | wrren/baiji_generator.ex | 7c36c0089d6b88576fa6e064e86c342f03729003 | [
"MIT"
] | null | null | null | lib/baiji_generator/spec/writer.ex | wrren/baiji_generator.ex | 7c36c0089d6b88576fa6e064e86c342f03729003 | [
"MIT"
defmodule Baiji.Generator.Spec.Writer do
  @moduledoc """
  Writes generated `.ex` service files for a list of specs.
  """

  alias Baiji.Generator.Spec

  @doc """
  Given a list of specs, write generated .ex files for each
  service to the path specified
  """
  def write(specs, write_path, template_file) when is_list(specs) do
    Enum.each(specs, fn spec ->
      contents = generate_service_file_contents(spec, template_file)
      write_service_file(write_path, spec, contents)
    end)
  end

  @doc """
  Renders the EEx template for a single spec, exposing `spec` and the
  derived `module` name to the template.
  """
  def generate_service_file_contents(%Spec{} = spec, template_file) do
    EEx.eval_file(template_file, [spec: spec, module: module_name(spec)], [trim: true])
  end

  @doc """
  Writes `contents` under `path` using the spec's generated file name.
  Raises on I/O errors.
  """
  def write_service_file(path, spec, contents) do
    path
    |> Path.join(file_name(spec))
    |> File.write!(contents)
  end

  @doc """
  Applies `fun` to `out` and returns the result.

  Equivalent to `Kernel.then/2` (available since Elixir 1.12); kept for
  backwards compatibility with existing callers.
  """
  def then(out, fun), do: fun.(out)

  @doc """
  Given a spec, generate an appropriate output file name
  """
  def file_name(%Spec{full_name: full_name, abbreviation: nil}), do: to_file_name(full_name)
  def file_name(%Spec{abbreviation: abbreviation}), do: to_file_name(abbreviation)

  @doc """
  Forms an output module name from a service
  """
  def module_name(%Spec{full_name: full_name, abbreviation: nil}) do
    full_name
    |> significant_parts()
    |> Enum.map_join(&String.capitalize/1)
  end

  # NOTE: unlike the full-name clause, the abbreviation parts are joined
  # without capitalization — preserved as-is since callers may rely on it.
  def module_name(%Spec{abbreviation: abbreviation}) do
    abbreviation
    |> significant_parts()
    |> Enum.join()
  end

  # snake_cases the significant parts of `name` and appends ".ex".
  defp to_file_name(name) do
    name
    |> significant_parts()
    |> Enum.map_join("_", &String.downcase/1)
    |> Kernel.<>(".ex")
  end

  # Splits a service name on common separators and drops AWS vendor prefixes.
  defp significant_parts(name) do
    name
    |> String.split([" ", "-", ".", "/"])
    |> Enum.reject(&(&1 in ["Amazon", "AWS"]))
  end
end
ffb0713a8cff319a668f7305c008df3a9c70ecd3 | 1,334 | ex | Elixir | lib/lpass_client/application.ex | Arp-G/lpass_client | 43da579914191b1f1f326c28a614ef16acfcb12f | [
"MIT"
] | null | null | null | lib/lpass_client/application.ex | Arp-G/lpass_client | 43da579914191b1f1f326c28a614ef16acfcb12f | [
"MIT"
] | null | null | null | lib/lpass_client/application.ex | Arp-G/lpass_client | 43da579914191b1f1f326c28a614ef16acfcb12f | [
"MIT"
defmodule LpassClient.Application do
  # OTP application entry point: builds and starts the root supervision tree.
  # See https://hexdocs.pm/elixir/Application.html for more information on
  # OTP Applications.
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # See https://hexdocs.pm/elixir/Supervisor.html for other strategies
    # and supported options.
    Supervisor.start_link(children(), strategy: :one_for_one, name: LpassClient.Supervisor)
  end

  # Child specs for the root supervisor, listed in start order.
  defp children do
    [
      # Telemetry supervisor
      LpassClientWeb.Telemetry,
      # PubSub system
      {Phoenix.PubSub, name: LpassClient.PubSub},
      # GenServer that periodically syncs the local app with the LastPass server
      LpassClient.Syncer,
      # GenServer that caches data
      LpassClient.Cache,
      # Custom hackney pool ":main" to serve more concurrent connections
      :hackney_pool.child_spec(:main, timeout: 15_000, max_connections: 100),
      # HTTP/HTTPS endpoint
      LpassClientWeb.Endpoint
    ]
  end

  # Tell Phoenix to update the endpoint configuration whenever the
  # application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    LpassClientWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
| 28.382979 | 82 | 0.710645 |
ffb07f7b38610545ccd6cf4a0f98ec132935b7da | 620 | ex | Elixir | lib/future_made_concerts_web/views/episode_view.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
] | null | null | null | lib/future_made_concerts_web/views/episode_view.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
] | null | null | null | lib/future_made_concerts_web/views/episode_view.ex | Future-Made/concerts-for-impact | 5532cd1be5252fa0ccb0b956f0961be8701e0e04 | [
"MIT"
defmodule FutureMadeConcertsWeb.EpisodeView do
  @moduledoc false
  use FutureMadeConcertsWeb, :view

  alias FutureMadeConcerts.Spotify.Schema.{Episode, Player}

  # True only when the player is actively playing this exact episode
  # (shared `id` binding forces the ids to match).
  @spec playing?(Episode.t(), Player.t()) :: boolean()
  defp playing?(%Episode{id: id}, %Player{status: :playing, item: %{id: id}}), do: true
  defp playing?(_episode, _player), do: false

  # Renders the episode description with URLs turned into safe, clickable
  # links that open in a new tab.
  @spec rich_description(Episode.t()) :: Phoenix.HTML.safe()
  defp rich_description(%Episode{description: description}) do
    linked = ExAutolink.link(description, args: %{rel: "nofollow noreferrer", target: "_blank"})
    raw(linked)
  end
end
| 31 | 94 | 0.71129 |
ffb0da75c7b8068e5c6ffb8bd6a21a50fb04291c | 1,182 | exs | Elixir | mix.exs | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
] | null | null | null | mix.exs | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
] | null | null | null | mix.exs | NeoArcanjo/hound | 31f15d35aafcebc6263c28948f2bc84eefe8892d | [
"MIT"
# Mix project definition for the :hound package (a fork of HashNuke/hound).
defmodule Hound.Mixfile do
  use Mix.Project

  # Single source of truth for the package version; reused in docs config.
  @version "1.2.2"

  def project do
    [
      app: :hound,
      version: @version,
      elixir: ">= 1.9.0",
      description:
        "Webdriver library for integration testing and browser automation - Forked from HashNuke/hound",
      # NOTE(review): points at an internal GitLab host, unreachable outside
      # that network.
      source_url: "http://192.168.0.190/rafael/hound",
      deps: deps(),
      package: package(),
      docs: [source_ref: "#{@version}", extras: ["README.md"], main: "readme"]
    ]
  end

  # OTP application configuration. The charlist description presumably ends
  # up in the generated .app resource file — confirm against Mix docs.
  def application do
    [
      extra_applications: [:logger],
      mod: {Hound.Application, []},
      description: 'Integration testing and browser automation library'
    ]
  end

  # Runtime and dev/test-only dependencies.
  defp deps do
    [
      {:hackney, "~> 1.17"},
      {:jason, "~> 1.2.2"},
      {:earmark, "~> 1.2", only: :dev},
      {:ex_doc, "~> 0.16", only: :dev},
      {:credo, ">= 0.0.0", only: [:dev, :test]},
      {:doctor, "~> 0.18.0", only: :dev}
    ]
  end

  # Hex package metadata.
  defp package do
    [
      maintainers: ["Leonardo Telles de Sá Ferreira","Rafael Arcanjo"],
      licenses: ["MIT"],
      links: %{
        "Git" => "http://192.168.0.190/rafael/hound",
        "Docs" => "http://hexdocs.pm/hound/"
      }
    ]
  end
end
| 23.64 | 104 | 0.538917 |
ffb0e1c785533c085cf330a6dcf21fc0028b8b16 | 1,889 | ex | Elixir | lib/brando/soft_delete/query.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | lib/brando/soft_delete/query.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | lib/brando/soft_delete/query.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
defmodule Brando.SoftDelete.Query do
  @moduledoc """
  Query tools for Soft deletion
  """
  alias Brando.Trait
  import Ecto.Query

  @doc """
  Excludes all deleted entries from query
  """
  def exclude_deleted(query), do: from(t in query, where: is_nil(t.deleted_at))

  @doc """
  List all soft delete enabled schemas
  """
  def list_soft_delete_schemas do
    Trait.SoftDelete.list_implementations()
  end

  @doc """
  Check if `schema` is soft deleted
  """
  def soft_delete_schema?(schema), do: schema in list_soft_delete_schemas()

  @doc """
  Count all soft deleted entries per schema
  """
  def count_soft_deletions do
    schemas = list_soft_delete_schemas()

    # Builds one big UNION ALL: each reduce step wraps the previous query via
    # `union_all: ^q`, so the final query lists the schemas in REVERSE build
    # order.
    union_query =
      Enum.reduce(schemas, nil, fn
        schema, nil ->
          from t in schema, select: count(t.id), where: not is_nil(t.deleted_at)

        schema, q ->
          from t in schema, select: count(t.id), where: not is_nil(t.deleted_at), union_all: ^q
      end)

    # Reversing the result rows re-aligns the counts with `schemas`.
    # NOTE(review): this relies on the database returning UNION ALL branches
    # in query order (no ORDER BY here) — confirm for the target DB.
    counts =
      union_query
      |> Brando.repo().all()
      |> Enum.reverse()

    Enum.zip(schemas, counts)
  end

  @doc """
  List all soft deleted entries across schemas
  """
  def list_soft_deleted_entries do
    schemas = list_soft_delete_schemas()
    Enum.flat_map(schemas, &list_soft_deleted_entries(&1))
  end

  @doc """
  List soft deleted entries for `schema`
  """
  def list_soft_deleted_entries(schema) do
    query = from t in schema, where: not is_nil(t.deleted_at), order_by: [desc: t.deleted_at]
    Brando.repo().all(query)
  end

  @doc """
  Clean up and delete all expired soft deleted entries
  """
  def clean_up_soft_deletions, do: Enum.map(list_soft_delete_schemas(), &clean_up_schema/1)

  # Hard-deletes rows soft-deleted more than 30 days ago (retention period is
  # hard-coded in the SQL fragment below).
  defp clean_up_schema(schema) do
    query =
      from t in schema,
        where: fragment("? < current_timestamp - interval '30 day'", t.deleted_at)

    Brando.repo().delete_all(query)
  end
end
| 24.532468 | 95 | 0.670725 |
ffb0ff0ac31d32c2b3730869effcd6608925d167 | 5,187 | ex | Elixir | lib/square_up/resources/v2/payment.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 4 | 2020-10-21T18:34:50.000Z | 2022-03-16T06:25:44.000Z | lib/square_up/resources/v2/payment.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 5 | 2020-10-21T23:16:32.000Z | 2021-05-13T13:42:44.000Z | lib/square_up/resources/v2/payment.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
defmodule SquareUp.V2.Payment do
  @moduledoc """
  Wrappers for Square's `/v2/payments` and `/v2/refunds` HTTP endpoints.

  Every public function takes the client plus optional path, body and query
  parameters and delegates to `SquareUp.Client.call/2` together with the
  Norm specs used to validate the request and the response.
  """

  import Norm
  import SquareUp.Client, only: [call: 2]

  @spec refund(SquareUp.Client.t(), %{}, SquareUp.TypeSpecs.refund_payment_request(), %{}) ::
          SquareUp.Client.response(SquareUp.TypeSpecs.refund_payment_response())
  def refund(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :post,
      "/v2/refunds",
      {path_params, schema(%{})},
      {params, Norm.Delegate.delegate(&SquareUp.NormSchema.refund_payment_request/0)},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.refund_payment_response/0}
    )
  end

  @spec cancel_by_idempotency_key(
          SquareUp.Client.t(),
          %{},
          SquareUp.TypeSpecs.cancel_payment_by_idempotency_key_request(),
          %{}
        ) ::
          SquareUp.Client.response(
            SquareUp.TypeSpecs.cancel_payment_by_idempotency_key_response()
          )
  def cancel_by_idempotency_key(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :post,
      "/v2/payments/cancel",
      {path_params, schema(%{})},
      {params,
       Norm.Delegate.delegate(&SquareUp.NormSchema.cancel_payment_by_idempotency_key_request/0)},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.cancel_payment_by_idempotency_key_response/0}
    )
  end

  @spec complete(SquareUp.Client.t(), %{required(:payment_id) => binary()}, %{}, %{}) ::
          SquareUp.Client.response(SquareUp.TypeSpecs.complete_payment_response())
  def complete(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :post,
      "/v2/payments/{payment_id}/complete",
      {path_params, payment_id_spec()},
      {params, schema(%{})},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.complete_payment_response/0}
    )
  end

  @spec get(SquareUp.Client.t(), %{required(:payment_id) => binary()}, %{}, %{}) ::
          SquareUp.Client.response(SquareUp.TypeSpecs.get_payment_response())
  def get(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :get,
      "/v2/payments/{payment_id}",
      {path_params, payment_id_spec()},
      {params, schema(%{})},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.get_payment_response/0}
    )
  end

  @spec cancel(SquareUp.Client.t(), %{required(:payment_id) => binary()}, %{}, %{}) ::
          SquareUp.Client.response(SquareUp.TypeSpecs.cancel_payment_response())
  def cancel(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :post,
      "/v2/payments/{payment_id}/cancel",
      {path_params, payment_id_spec()},
      {params, schema(%{})},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.cancel_payment_response/0}
    )
  end

  @spec create(SquareUp.Client.t(), %{}, SquareUp.TypeSpecs.create_payment_request(), %{}) ::
          SquareUp.Client.response(SquareUp.TypeSpecs.create_payment_response())
  def create(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
    request(
      client,
      :post,
      "/v2/payments",
      {path_params, schema(%{})},
      {params, Norm.Delegate.delegate(&SquareUp.NormSchema.create_payment_request/0)},
      query_params,
      {:delegate, &SquareUp.ResponseSchema.create_payment_response/0}
    )
  end

  # Path-parameter spec shared by the endpoints that address a single payment.
  defp payment_id_spec, do: schema(%{payment_id: spec(is_binary())})

  # Shared plumbing for every endpoint above. All endpoints use an empty
  # query-parameter spec, so only the HTTP method, path, and the remaining
  # specs vary per function.
  defp request(
         client,
         method,
         path,
         {path_params, path_params_spec},
         {params, params_spec},
         query_params,
         response_spec
       ) do
    call(client, %{
      method: method,
      path_params: path_params,
      params: params,
      query_params: query_params,
      path_params_spec: path_params_spec,
      params_spec: params_spec,
      query_params_spec: schema(%{}),
      response_spec: response_spec,
      path: path
    })
  end
end
| 35.047297 | 98 | 0.67033 |
ffb1302b57a61e1191eac061253dfe1b45d4583f | 1,999 | exs | Elixir | test/realtime_signs_config_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | test/realtime_signs_config_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | test/realtime_signs_config_test.exs | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
defmodule RealtimeSignsConfigTest do
  # async: false — these tests mutate the global :realtime_signs application
  # environment, so they must not interleave with each other.
  use ExUnit.Case, async: false
  import ExUnit.CaptureLog
  import RealtimeSignsConfig

  setup do
    # Ensure no test leaks :app_key into the application environment.
    on_exit(fn -> Application.delete_env(:realtime_signs, :app_key) end)
  end

  describe "update_env/5" do
    test "sets the application environment" do
      assert :ok = update_env(%{"ENV_VAR" => "foo"}, :app_key, "ENV_VAR")
      assert Application.get_env(:realtime_signs, :app_key) == "foo"
    end

    test "still returns :ok if missing, and doesn't update app environment" do
      assert :ok = update_env(%{}, :app_key, "ENV_VAR")
      assert Application.get_env(:realtime_signs, :app_key) == nil
    end

    test "converts an integer before storing in application environment" do
      assert :ok = update_env(%{"ENV_VAR" => "5"}, :app_key, "ENV_VAR", type: :integer)
      assert Application.get_env(:realtime_signs, :app_key) == 5
    end

    test "treats a boolean value as false if the environment variable isn't specified" do
      assert :ok = update_env(%{}, :app_key, "ENV_VAR", type: :boolean)
      assert Application.get_env(:realtime_signs, :app_key) == false
    end

    test "treats an empty string value as false for a boolean" do
      assert :ok = update_env(%{"ENV_VAR" => ""}, :app_key, "ENV_VAR", type: :boolean)
      assert Application.get_env(:realtime_signs, :app_key) == false
    end

    test "treats any non-empty string value as true for a boolean" do
      assert :ok = update_env(%{"ENV_VAR" => "1"}, :app_key, "ENV_VAR", type: :boolean)
      assert Application.get_env(:realtime_signs, :app_key) == true
    end

    test "logs the environment variable unless it's private" do
      env = %{"ENV1" => "env1", "ENV2" => "env2"}

      log =
        capture_log(fn ->
          :ok = update_env(env, :app_key, "ENV1")
          :ok = update_env(env, :app_key, "ENV2", private?: true)
          # Gives the asynchronous Logger a chance to flush before
          # capture_log returns — presumably; confirm against Logger config.
          :ok = Process.sleep(50)
        end)

      assert log =~ ~s(ENV1="env1")
      refute log =~ "ENV2"
    end
  end
end
| 35.070175 | 89 | 0.646823 |
ffb13830c4daff69a76bd990f501c0eae8a9ad09 | 504 | ex | Elixir | lib/hierbautberlin/geo_data/geo_place.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 13 | 2021-03-06T12:16:34.000Z | 2022-03-31T09:46:35.000Z | lib/hierbautberlin/geo_data/geo_place.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
] | 148 | 2021-03-05T12:44:55.000Z | 2022-03-11T12:09:06.000Z | lib/hierbautberlin/geo_data/geo_place.ex | HierBautBerlin/website | 91410e7c61c1efad438fe84bf550f87b0056c440 | [
"MIT"
defmodule Hierbautberlin.GeoData.GeoPlace do
  @moduledoc """
  Ecto schema for the `geo_places` table: a named place (with city, district
  and type) carrying PostGIS geometry columns and a many-to-many link to
  news items.
  """

  use Ecto.Schema

  alias Geo.PostGIS.Geometry
  alias Hierbautberlin.GeoData.NewsItem

  schema "geo_places" do
    field :external_id, :string
    field :name, :string
    field :city, :string
    field :district, :string
    field :type, :string
    # PostGIS geometry columns. NOTE(review): the relationship between
    # `geometry` and `geo_point` is not visible here — confirm against the
    # migrations (presumably the full shape plus a representative point).
    field :geometry, Geometry
    field :geo_point, Geometry

    # `on_replace: :delete` removes join rows dropped from an update.
    many_to_many :news_items, NewsItem, join_through: "geo_places_news_items", on_replace: :delete

    timestamps(type: :utc_datetime)
  end
end
| 22.909091 | 98 | 0.730159 |
ffb13c36de6af3d270992cbbb0c0267b8e0764c2 | 616 | exs | Elixir | code_generation/mix.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
] | null | null | null | code_generation/mix.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
] | null | null | null | code_generation/mix.exs | sdrew/protox | c28d02f1626b5cd39bad7de2b415d20ebbdf76ee | [
"MIT"
# Mix project for the :code_generation app; depends on the parent-directory
# protox checkout via a path dependency.
defmodule CodeGeneration.MixProject do
  use Mix.Project

  def project do
    [
      app: :code_generation,
      version: "0.1.0",
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:protox, path: ".."},
      {:dialyxir, "~> 1.0", only: [:test, :dev], runtime: false},
      {:credo, "~> 1.4", only: [:test, :dev], runtime: false}
    ]
  end
end
| 20.533333 | 65 | 0.561688 |
ffb1947834080f25155e2e8cbc32802a57f9559f | 733 | ex | Elixir | lib/ex_json_schema/validator/const.ex | tinfoil/ex_json_schema | 6836d531c9adcd6abfa3a5e296d3da977f7b7fb1 | [
"MIT"
] | 301 | 2015-07-17T22:22:56.000Z | 2022-03-20T13:42:22.000Z | lib/ex_json_schema/validator/const.ex | tinfoil/ex_json_schema | 6836d531c9adcd6abfa3a5e296d3da977f7b7fb1 | [
"MIT"
] | 70 | 2015-09-30T21:19:43.000Z | 2022-02-03T10:23:07.000Z | lib/ex_json_schema/validator/const.ex | tinfoil/ex_json_schema | 6836d531c9adcd6abfa3a5e296d3da977f7b7fb1 | [
"MIT"
defmodule ExJsonSchema.Validator.Const do
  @moduledoc """
  `ExJsonSchema.Validator` implementation for `"const"` attributes.

  See:
  https://tools.ietf.org/html/draft-wright-json-schema-validation-01#section-6.24
  https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-6.1.3
  """

  alias ExJsonSchema.Validator.Error

  @behaviour ExJsonSchema.Validator

  # The "const" keyword only exists from draft 6 onwards; for earlier drafts
  # (or any other keyword) the fallback clause reports no errors.
  @impl ExJsonSchema.Validator
  def validate(%{version: version}, _schema, {"const", const}, data, _path) when version >= 6,
    do: errors_for(const, data)

  def validate(_root, _schema, _property, _data, _path), do: []

  # Guarded with `==` (not a pattern match) so numerically equal integers and
  # floats compare equal, exactly like the original if/else comparison.
  defp errors_for(const, data) when const == data, do: []
  defp errors_for(const, _data), do: [%Error{error: %Error.Const{expected: const}}]
end
| 23.645161 | 86 | 0.684857 |
ffb1966f891de1eb1f2087e662e2044b70cc9c7d | 4,425 | exs | Elixir | apps/ewallet_db/test/ewallet_db/validator_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/test/ewallet_db/validator_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/test/ewallet_db/validator_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.ValidatorTest do
  @moduledoc """
  Unit tests for the `EWalletDB.Validator` changeset helpers.
  """
  use EWalletDB.SchemaCase, async: true
  import Ecto.Changeset
  import EWalletDB.Validator

  # Minimal Ecto schema used purely as a fixture to build changesets for the
  # validators under test; it is never persisted by these tests.
  defmodule SampleStruct do
    use Ecto.Schema

    schema "sample_structs" do
      field(:attr1, :string)
      field(:attr2, :string)
      field(:attr3, :string)
    end
  end

  describe "validate_password/1" do
    test "returns {:ok, password} if the password meets requirements" do
      assert validate_password("valid_password") == {:ok, "valid_password"}
    end

    test "returns {:error, :password_too_short, data} if the password is nil" do
      assert validate_password(nil) == {:error, :password_too_short, [min_length: 8]}
    end

    test "returns {:error, :password_too_short, data} if the password is empty" do
      assert validate_password("") == {:error, :password_too_short, [min_length: 8]}
    end

    test "returns {:error, :password_too_short, data} if the password is shorter than 8 chars" do
      assert validate_password("short") == {:error, :password_too_short, [min_length: 8]}
    end
  end

  describe "validate_password/2" do
    test "returns valid if the password meets the requirements" do
      struct = %SampleStruct{
        attr1: "valid_password"
      }

      changeset =
        struct
        |> cast(%{attr1: "valid_password"}, [:attr1])
        |> validate_password(:attr1)

      assert changeset.valid?
    end

    test "returns invalid if the password is empty" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: ""}, [:attr1])
        |> validate_password(:attr1)

      refute changeset.valid?
      assert changeset.errors == [{:attr1, {"must be 8 characters or more", []}}]
    end

    test "returns invalid if the password is shorter than 8 chars" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: "short"}, [:attr1])
        |> validate_password(:attr1)

      refute changeset.valid?
      assert changeset.errors == [{:attr1, {"must be 8 characters or more", []}}]
    end
  end

  describe "validate_email/2" do
    test "returns a valid changeset if the attribute meets the requirements" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: "valid.email@example.com"}, [:attr1])
        |> validate_email(:attr1)

      assert changeset.valid?
    end

    test "returns an invalid changeset if the attribute does not have '@' sign" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: "not.an.email"}, [:attr1])
        |> validate_email(:attr1)

      refute changeset.valid?
    end

    test "returns an invalid changeset if the attribute is an empty string" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: ""}, [:attr1])
        |> validate_email(:attr1)

      refute changeset.valid?
    end

    test "returns an invalid changeset if the attribute is nil" do
      changeset =
        %SampleStruct{}
        |> cast(%{attr1: nil}, [:attr1])
        |> validate_email(:attr1)

      refute changeset.valid?
    end
  end

  describe "validate_different_values/3" do
    test "valid if values are different" do
      attrs = %{
        attr1: "value",
        attr2: "different_value"
      }

      changeset =
        %SampleStruct{}
        |> cast(attrs, [:attr1, :attr2])
        |> validate_different_values(:attr1, :attr2)

      assert changeset.valid?
    end

    test "returns invalid if values are the same" do
      attrs = %{
        attr1: "same_value",
        attr2: "same_value"
      }

      changeset =
        %SampleStruct{}
        |> cast(attrs, [:attr1, :attr2])
        |> validate_different_values(:attr1, :attr2)

      refute changeset.valid?

      assert changeset.errors == [
               {:attr2, {"can't have the same value as `attr1`", [validation: :different_values]}}
             ]
    end
  end
end
| 28.548387 | 98 | 0.632316 |
ffb1ab80435093ec6cd483a95bfac62dfe7b1c91 | 1,427 | ex | Elixir | lib/cforum/messages/reindex_messages_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | null | null | null | lib/cforum/messages/reindex_messages_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | null | null | null | lib/cforum/messages/reindex_messages_job.ex | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
defmodule Cforum.Messages.ReindexMessagesJob do
  @moduledoc """
  Walks all non-deleted messages and re-enqueues each one for search
  indexing, after first purging search documents whose messages are gone or
  soft-deleted.
  """
  use Appsignal.Instrumentation.Decorators

  import Ecto.Query, warn: false

  alias Cforum.Repo
  alias Cforum.Search
  alias Cforum.Threads
  alias Cforum.Messages
  alias Cforum.Search.Document
  alias Cforum.Messages.Message

  @decorate transaction(:indexing)
  def reindex_messages(start_id \\ 0) do
    # Phase 1: delete stale "forum" search documents — those whose referenced
    # message no longer exists (left join came back nil) or is soft-deleted.
    # Repo.stream/1 requires the surrounding transaction; timeout is lifted
    # because the stream may run long.
    Repo.transaction(
      fn ->
        from(doc in Document,
          left_join: msg in Message,
          on: [message_id: doc.reference_id],
          inner_join: section in assoc(doc, :search_section),
          where: section.section_type == "forum",
          where: is_nil(msg.message_id) or msg.deleted == true
        )
        |> Repo.stream()
        |> Enum.each(&Search.delete_document/1)

        :ok
      end,
      timeout: :infinity
    )

    # Phase 2: `do_reindex_messages/1` selects ids strictly greater than its
    # argument, so passing `start_id - 1` makes the walk include start_id.
    do_reindex_messages(start_id - 1)
  end

  # Recursively indexes the next non-deleted message after `last_id`, one at
  # a time in ascending id order, until no message remains.
  @decorate transaction_event(:indexing)
  defp do_reindex_messages(last_id) do
    mid =
      from(m in Message,
        select: m.message_id,
        where: m.message_id > ^last_id,
        where: m.deleted == false,
        order_by: [asc: :message_id],
        limit: 1
      )
      |> Repo.one()

    if not is_nil(mid) do
      message = Messages.get_message(mid)
      thread = Threads.get_thread!(message.thread_id)
      Cforum.Jobs.MessageIndexerJob.index_message(thread, message)
      # Paces the indexer — 100 ms between messages to avoid hammering the
      # DB/search backend.
      Process.sleep(100)
      do_reindex_messages(mid)
    end
  end
end
| 25.945455 | 66 | 0.64541 |
ffb1ca63fb3eb359129dfb1c681d97a625ebcaf8 | 2,088 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/interconnect_attachments_scoped_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/interconnect_attachments_scoped_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/interconnect_attachments_scoped_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.InterconnectAttachmentsScopedListWarningData do
  @moduledoc """

  ## Attributes

  * `key` (*type:* `String.t`, *default:* `nil`) - [Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).
  * `value` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning data value corresponding to the key.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :key => String.t(),
          :value => String.t()
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase and registers the model's
  # JSON fields — presumably wiring up decode/encode support; see ModelBase.
  field(:key)
  field(:value)
end
# Poison decoding delegates to the model's own decode/2 (provided via
# GoogleApi.Gax.ModelBase — presumably; confirm against the generator).
defimpl Poison.Decoder,
  for: GoogleApi.Compute.V1.Model.InterconnectAttachmentsScopedListWarningData do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.InterconnectAttachmentsScopedListWarningData.decode(value, options)
  end
end
# Poison encoding uses the shared ModelBase encoder for all generated models.
defimpl Poison.Encoder,
  for: GoogleApi.Compute.V1.Model.InterconnectAttachmentsScopedListWarningData do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.153846 | 527 | 0.748563 |
ffb1dcb33b3d3c21653f7036c9df45068341d3d1 | 2,805 | exs | Elixir | mix.exs | Adzz/money | c611d53018f5fcba7a4e85569caf8b41f57d79dd | [
"Apache-2.0"
] | null | null | null | mix.exs | Adzz/money | c611d53018f5fcba7a4e85569caf8b41f57d79dd | [
"Apache-2.0"
] | null | null | null | mix.exs | Adzz/money | c611d53018f5fcba7a4e85569caf8b41f57d79dd | [
"Apache-2.0"
] | null | null | null | defmodule Money.Mixfile do
use Mix.Project
@version "5.6.0"
def project do
[
app: :ex_money,
version: @version,
elixir: "~> 1.6",
name: "Money",
source_url: "https://github.com/kipcole9/money",
docs: docs(),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
description: description(),
package: package(),
test_coverage: [tool: ExCoveralls],
aliases: aliases(),
elixirc_paths: elixirc_paths(Mix.env()),
dialyzer: [
ignore_warnings: ".dialyzer_ignore_warnings",
plt_add_apps: ~w(inets jason mix phoenix_html)a
],
compilers: Mix.compilers()
]
end
defp description do
"Money functions for operations on and localization of a money data type."
end
defp package do
[
maintainers: ["Kip Cole"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => "https://github.com/kipcole9/money",
"Readme" => "https://github.com/kipcole9/money/blob/v#{@version}/README.md",
"Changelog" => "https://github.com/kipcole9/money/blob/v#{@version}/CHANGELOG.md"
},
files: [
"lib",
"config",
"mix.exs",
"README.md",
"CHANGELOG.md",
"LICENSE.md"
]
]
end
def application do
[
mod: {Money.Application, [strategy: :one_for_one, name: Money.Supervisor]},
extra_applications: [:inets, :logger]
]
end
def docs do
[
source_ref: "v#{@version}",
extras: ["README.md", "CHANGELOG.md", "LICENSE.md"],
main: "readme",
groups_for_modules: groups_for_modules(),
logo: "logo.png",
skip_undefined_reference_warnings_on: ["changelog", "CHANGELOG.md", "README.md"]
]
end
defp groups_for_modules do
[
"Exchange Rates": ~r/^Money.ExchangeRates.?/,
Subscriptions: ~r/^Money.Subscription.?/
]
end
def aliases do
[]
end
defp deps do
[
{:ex_cldr_numbers, "~> 2.16"},
{:decimal, "~> 1.6 or ~> 2.0"},
{:phoenix_html, "~> 2.0 or ~> 3.0", optional: true},
{:nimble_parsec, "~> 0.5 or ~> 1.0"},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false},
{:jason, "~> 1.0", optional: true},
{:stream_data, "~> 0.4", only: [:dev, :test]},
{:gringotts, "~>1.1", only: :test, optional: true},
{:benchee, "~> 1.0", optional: true, only: :dev},
{:exprof, "~> 0.2", only: :dev, runtime: false},
{:ex_doc, "~> 0.22", only: [:dev, :release]},
{:castore, "~> 0.1", optional: true},
{:certifi, "~> 2.5", optional: true}
]
end
defp elixirc_paths(:test), do: ["lib", "test", "test/support"]
defp elixirc_paths(:dev), do: ["lib", "mix"]
defp elixirc_paths(_), do: ["lib"]
end
| 26.714286 | 89 | 0.554724 |
ffb1f09f7c9435a305890c49d3c21c6a9c5264f6 | 3,057 | ex | Elixir | lib/workers/send_mention_notification.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
] | null | null | null | lib/workers/send_mention_notification.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
] | null | null | null | lib/workers/send_mention_notification.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
defmodule ChatApi.Workers.SendMentionNotification do
  @moduledoc """
  Oban worker that emails a user when they are @mentioned in a message.

  Job args arrive as JSON-decoded, string-keyed `"message"` and `"user"`
  maps — not `Message`/`User` structs.
  """

  use Oban.Worker, queue: :mailers

  import Ecto.Query, warn: false

  require Logger

  alias ChatApi.{Accounts, Users}
  alias ChatApi.Messages
  alias ChatApi.Messages.Message

  @impl Oban.Worker
  @spec perform(Oban.Job.t()) :: :ok
  def perform(%Oban.Job{args: %{"message" => message, "user" => user}}) do
    if enabled?() && should_send_email?(user) do
      Logger.info("Checking if we need to send reply email: #{inspect(message)}")
      send_email(message, user)
    else
      Logger.info(
        "Skipping @mention notification email: #{inspect(message)} (user: #{inspect(user)})"
      )
    end

    :ok
  end

  # Both arguments are string-keyed maps from the job args, hence map()
  # rather than schema structs in the spec (the original spec also had the
  # wrong arity: 1 instead of 2).
  @spec send_email(map(), map()) :: :ok | :skipped | :error
  # Messages without an authoring user are skipped.
  def send_email(%{"user_id" => nil, "user" => nil}, _user), do: :skipped

  def send_email(
        %{
          "seen_at" => nil,
          "user_id" => sender_id,
          "account_id" => account_id,
          "customer_id" => nil,
          "conversation_id" => conversation_id
        } = _message,
        %{"id" => recipient_id, "email" => _email} = _user
      ) do
    Logger.info("Sending @mention notification email!")

    email =
      ChatApi.Emails.send_mention_notification_email(
        sender: Users.get_user_info(sender_id),
        recipient: Users.get_user_info(recipient_id),
        account: Accounts.get_account!(account_id),
        messages: get_recent_messages(conversation_id, account_id)
      )

    case email do
      {:ok, result} ->
        Logger.info("Sent @mention notification email! #{inspect(result)}")

      {:error, reason} ->
        Logger.error("Failed to send @mention notification email! #{inspect(reason)}")

      {:warning, reason} ->
        Logger.warn(reason)
    end
  end

  # BUGFIX: the fallback must be arity 2. The original arity-1 clause could
  # never match the two-argument calls made by perform/1, so any
  # message/user shape that missed the clauses above raised a
  # FunctionClauseError instead of returning :error.
  def send_email(_message, _user), do: :error

  @doc """
  Returns up to the last 5 messages of the conversation, reversed —
  presumably so they read oldest-to-newest, assuming
  `Messages.list_by_conversation/3` returns newest first; confirm.
  """
  @spec get_recent_messages(binary(), binary()) :: [Message.t()]
  def get_recent_messages(conversation_id, account_id) do
    conversation_id
    |> Messages.list_by_conversation(%{"account_id" => account_id}, limit: 5)
    |> Enum.reverse()
  end

  @doc """
  Check that the user has a valid email before sending.

  Accepts the string-keyed user map from the job args; the user must not be
  disabled or archived.
  """
  @spec should_send_email?(map()) :: boolean()
  def should_send_email?(%{
        "email" => email,
        "disabled_at" => nil,
        "archived_at" => nil
      }),
      do: ChatApi.Emails.Helpers.valid_format?(email)

  def should_send_email?(_), do: false

  @spec enabled?() :: boolean()
  def enabled?() do
    has_valid_email_domain?() && mention_notification_emails_enabled?()
  end

  @spec has_valid_email_domain? :: boolean()
  def has_valid_email_domain?() do
    System.get_env("DOMAIN") == "mail.heyHakerspeak.io"
  end

  @spec mention_notification_emails_enabled? :: boolean()
  def mention_notification_emails_enabled?() do
    # Should be enabled by default, unless the MENTION_NOTIFICATION_EMAILS_DISABLED is set
    case System.get_env("MENTION_NOTIFICATION_EMAILS_DISABLED") do
      x when x == "1" or x == "true" -> false
      _ -> true
    end
  end
end
| 28.570093 | 92 | 0.645731 |
ffb1fb20a657ef12de060b0aaa2be63337f7e70f | 1,596 | ex | Elixir | 13-phoenix/hello/lib/hello_web/endpoint.ex | kranfix/elixir-playground | 28f1314b137eb591946f501647e76d8017070ffa | [
"MIT"
] | null | null | null | 13-phoenix/hello/lib/hello_web/endpoint.ex | kranfix/elixir-playground | 28f1314b137eb591946f501647e76d8017070ffa | [
"MIT"
] | null | null | null | 13-phoenix/hello/lib/hello_web/endpoint.ex | kranfix/elixir-playground | 28f1314b137eb591946f501647e76d8017070ffa | [
"MIT"
] | 1 | 2020-11-17T07:06:17.000Z | 2020-11-17T07:06:17.000Z | defmodule HelloWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :hello

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_hello_key",
    signing_salt: "6Nnwua0s"
  ]

  # Channel transport socket: websocket only, long-polling disabled.
  socket "/socket", HelloWeb.UserSocket,
    websocket: true,
    longpoll: false

  # LiveView socket; the signed session is passed through connect_info so
  # LiveViews can access session data on mount.
  socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :hello,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
    plug Phoenix.Ecto.CheckRepoStatus, otp_app: :hello
  end

  # Streams request logs to the LiveDashboard when the matching request
  # param or cookie is present.
  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  # Parse form, multipart and JSON request bodies before routing.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  # Plugs run in order, so the router must stay last in this pipeline.
  plug HelloWeb.Router
end
| 29.018182 | 97 | 0.716165 |
ffb2051bbb2b19e330a84a6dda9198564f964bad | 868 | exs | Elixir | test/ref_inspector/database/init_test.exs | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | 7 | 2018-12-22T14:41:26.000Z | 2020-05-04T08:16:11.000Z | test/ref_inspector/database/init_test.exs | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | 2 | 2019-11-25T09:41:27.000Z | 2020-05-06T17:30:45.000Z | test/ref_inspector/database/init_test.exs | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | null | null | null | defmodule RefInspector.Database.InitTest do
use ExUnit.Case, async: false
import ExUnit.CaptureLog
alias RefInspector.Database
setup do
app_files = Application.get_env(:ref_inspector, :database_files)
on_exit(fn ->
Application.put_env(:ref_inspector, :database_files, app_files)
end)
end
test "require instance name" do
Process.flag(:trap_exit, true)
assert {:error, "missing instance name"} = Database.start_link([])
end
test "log info when initial load failed" do
file = "something_that_is_no_file"
Application.put_env(:ref_inspector, :database_files, [file])
log =
capture_log(fn ->
{Database, [instance: :ref_inspector_init_test, startup_sync: false]}
|> start_supervised!()
|> :sys.get_state()
end)
assert log =~ ~r/Failed to load #{file}: :enoent/
end
end
| 23.459459 | 77 | 0.682028 |
ffb20d9c52037d56db3d8fb04ac5d1d49f906700 | 355 | ex | Elixir | lib/employee_reward_app_web/workers/monthly_digest_worker.ex | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | lib/employee_reward_app_web/workers/monthly_digest_worker.ex | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | lib/employee_reward_app_web/workers/monthly_digest_worker.ex | Kapeusz/employee_reward_app | 738d1514ec733b0e8027423e740abdbdc27716d0 | [
"PostgreSQL",
"MIT"
] | null | null | null | defmodule EmployeeRewardAppWeb.Workers.MonthlyDigestWorker do
  # Oban worker configuration: runs on the :events queue with up to three
  # attempts; `unique: [period: 30]` deduplicates identical jobs enqueued
  # within a 30-second window (Oban periods are in seconds).
  use Oban.Worker,
    queue: :events,
    priority: 3,
    max_attempts: 3,
    tags: ["business"],
    unique: [period: 30]

  alias EmployeeRewardApp.Points
  # NOTE(review): Pool does not appear to be referenced anywhere in this
  # module — confirm and consider dropping the alias.
  alias EmployeeRewardApp.Points.Pool
@impl Oban.Worker
def perform(_job) do
EmployeeRewardApp.Points.reset_pools()
:ok
end
end
| 19.722222 | 61 | 0.735211 |
ffb228f7b201990f937700a41e21faa466f0d613 | 855 | exs | Elixir | test/telemetry_async/shard_test.exs | pushex-project/telemetry_async | 9fcfad89e02290175b9fae04d1971207805f11ac | [
"MIT"
] | null | null | null | test/telemetry_async/shard_test.exs | pushex-project/telemetry_async | 9fcfad89e02290175b9fae04d1971207805f11ac | [
"MIT"
] | 1 | 2019-05-02T19:42:00.000Z | 2019-05-04T03:27:09.000Z | test/telemetry_async/shard_test.exs | pushex-project/telemetry_async | 9fcfad89e02290175b9fae04d1971207805f11ac | [
"MIT"
] | null | null | null | defmodule TelemetryAsync.ShardTest do
use ExUnit.Case, async: false
require Logger
import ExUnit.CaptureLog
alias TelemetryAsync.Shard
test "a Shard can be started", %{test: test} do
assert {:ok, pid} = Shard.start_link(name: test)
assert is_pid(pid)
end
test "a function can be executed in the Shard", %{test: test} do
assert {:ok, pid} = Shard.start_link(name: test)
assert capture_log(fn ->
Shard.execute(pid, fn -> Logger.info("was here") end)
Process.sleep(25)
end) =~ "was here"
end
describe "name_for_number/2" do
test "a name is generated for the number and prefix" do
assert Shard.name_for_number(0, nil) == :"#{Shard}_0"
assert Shard.name_for_number(1, nil) == :"#{Shard}_1"
assert Shard.name_for_number(0, :prefix) == :prefix_0
end
end
end
| 28.5 | 66 | 0.650292 |
ffb23b259620cd82b343ef4b07988cc419ae921a | 245 | ex | Elixir | examples/bank/lib/bank/bank_account_state.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | examples/bank/lib/bank/bank_account_state.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | examples/bank/lib/bank/bank_account_state.ex | OleMchls/incident | 220e46598719fc50d10c96e5a848080172bf6f9b | [
"MIT"
] | null | null | null | defmodule Bank.BankAccountState do
  # Aggregate state for Bank.BankAccount via the Incident library.
  # `initial_state` is the blank state before any events are applied;
  # every field starts as nil.
  use Incident.AggregateState,
    aggregate: Bank.BankAccount,
    initial_state: %{
      aggregate_id: nil,
      account_number: nil,
      balance: nil,
      version: nil,
      updated_at: nil
    }
| 20.416667 | 34 | 0.657143 |
ffb242655d944efb80c4d1dab45dddcd3e6e4958 | 685 | ex | Elixir | lib/tmi/supervisor.ex | ryanwinchester/tmi.ex | e7a96e89ade2ae08429a2079f027517a86eaa9f5 | [
"Apache-2.0"
] | 11 | 2020-07-29T09:33:21.000Z | 2022-02-22T08:41:52.000Z | lib/tmi/supervisor.ex | ryanwinchester/tmi.ex | e7a96e89ade2ae08429a2079f027517a86eaa9f5 | [
"Apache-2.0"
] | 8 | 2020-07-30T03:40:21.000Z | 2022-03-25T21:51:08.000Z | lib/tmi/supervisor.ex | ryanwinchester/tmi.ex | e7a96e89ade2ae08429a2079f027517a86eaa9f5 | [
"Apache-2.0"
] | 4 | 2020-11-10T23:04:28.000Z | 2022-02-24T18:12:38.000Z | defmodule TMI.Supervisor do
@moduledoc """
TMI is a library for connecting to Twitch chat with Elixir.
See the [README](https://hexdocs.pm/tmi/readme.html) for more details.
"""
use Supervisor
def start_link(config) do
Supervisor.start_link(__MODULE__, config, name: __MODULE__)
end
@impl true
def init(config) do
conn = TMI.build_conn(config)
handler = Keyword.get(config, :handler, TMI.DefaultHandler)
children = [
{TMI, conn},
{TMI.Handlers.ConnectionHandler, conn},
{TMI.Handlers.LoginHandler, conn},
{TMI.Handlers.MessageHandler, {conn, handler}}
]
Supervisor.init(children, strategy: :one_for_one)
end
end
| 24.464286 | 72 | 0.687591 |
ffb258ca1b629911896962216e269c2b302c94e0 | 61 | ex | Elixir | lib/apiv4/repo.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 2 | 2016-02-25T20:12:35.000Z | 2018-01-03T00:03:12.000Z | lib/apiv4/repo.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | 1 | 2016-01-11T04:50:39.000Z | 2016-01-12T05:00:08.000Z | lib/apiv4/repo.ex | simwms/apiv4 | c3da7407eaf3580b759f49726028439b4b8ea9d0 | [
"MIT"
] | null | null | null | defmodule Apiv4.Repo do
use Ecto.Repo, otp_app: :apiv4
end
| 15.25 | 32 | 0.754098 |
ffb270d64212dadb60bf9b0298d1f90c16f3f4a3 | 5,422 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/billing_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/billing_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/billing_info.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Api.BillingInfo do
  @moduledoc """
  API calls for all endpoints tagged `BillingInfo`.
  """

  alias GoogleApi.AdExchangeBuyer.V14.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version string, attached to every outgoing request.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Returns the billing information for one account specified by account ID.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  *   `account_id` (*type:* `integer()`) - The account id.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.BillingInfo{}}` on success
  *   `{:error, info}` on failure
  """
  @spec adexchangebuyer_billing_info_get(Tesla.Env.client(), integer(), keyword(), keyword()) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.BillingInfo.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def adexchangebuyer_billing_info_get(connection, account_id, optional_params \\ [], opts \\ []) do
    # Query-string parameters this endpoint accepts.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # Build the GET request; {accountId} is interpolated into the path.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/billinginfo/{accountId}", %{
        "accountId" => account_id
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.BillingInfo{}])
  end

  @doc """
  Retrieves a list of billing information for all accounts of the authenticated user.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.BillingInfoList{}}` on success
  *   `{:error, info}` on failure
  """
  @spec adexchangebuyer_billing_info_list(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.BillingInfoList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def adexchangebuyer_billing_info_list(connection, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # Collection endpoint: GET with no path parameters.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/billinginfo", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.BillingInfoList{}])
  end
end
ffb273dfa64b4040f558abe2120c75be815be1d7 | 11,233 | ex | Elixir | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/cloud_function.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/cloud_function.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/cloud_functions/lib/google_api/cloud_functions/v1/model/cloud_function.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudFunctions.V1.Model.CloudFunction do
  @moduledoc """
  Describes a Cloud Function that contains user computation executed in response to an event. It encapsulate function and triggers configurations. Next tag: 35

  ## Attributes

  *   `availableMemoryMb` (*type:* `integer()`, *default:* `nil`) - The amount of memory in MB available for a function. Defaults to 256MB.
  *   `buildEnvironmentVariables` (*type:* `map()`, *default:* `nil`) - Build environment variables that shall be available during build time.
  *   `buildId` (*type:* `String.t`, *default:* `nil`) - Output only. The Cloud Build ID of the latest successful deployment of the function.
  *   `buildName` (*type:* `String.t`, *default:* `nil`) - Output only. The Cloud Build Name of the function deployment. `projects//locations//builds/`.
  *   `buildWorkerPool` (*type:* `String.t`, *default:* `nil`) - Name of the Cloud Build Custom Worker Pool that should be used to build the function. The format of this field is `projects/{project}/locations/{region}/workerPools/{workerPool}` where `{project}` and `{region}` are the project id and region respectively where the worker pool is defined and `{workerPool}` is the short name of the worker pool. If the project id is not the same as the function, then the Cloud Functions Service Agent (`service-@gcf-admin-robot.iam.gserviceaccount.com`) must be granted the role Cloud Build Custom Workers Builder (`roles/cloudbuild.customworkers.builder`) in the project.
  *   `description` (*type:* `String.t`, *default:* `nil`) - User-provided description of a function.
  *   `entryPoint` (*type:* `String.t`, *default:* `nil`) - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix, if not specified. For backward compatibility, if function with given name is not found, then the system will try to use function named "function". For Node.js this is name of a function exported by the module specified in `source_location`.
  *   `environmentVariables` (*type:* `map()`, *default:* `nil`) - Environment variables that shall be available during function execution.
  *   `eventTrigger` (*type:* `GoogleApi.CloudFunctions.V1.Model.EventTrigger.t`, *default:* `nil`) - A source that fires events in response to a condition in another service.
  *   `httpsTrigger` (*type:* `GoogleApi.CloudFunctions.V1.Model.HttpsTrigger.t`, *default:* `nil`) - An HTTPS endpoint type of source that can be triggered via URL.
  *   `ingressSettings` (*type:* `String.t`, *default:* `nil`) - The ingress settings for the function, controlling what traffic can reach it.
  *   `labels` (*type:* `map()`, *default:* `nil`) - Labels associated with this Cloud Function.
  *   `maxInstances` (*type:* `integer()`, *default:* `nil`) - The limit on the maximum number of function instances that may coexist at a given time. In some cases, such as rapid traffic surges, Cloud Functions may, for a short period of time, create more instances than the specified max instances limit. If your function cannot tolerate this temporary behavior, you may want to factor in a safety margin and set a lower max instances value than your function can tolerate. See the [Max Instances](https://cloud.google.com/functions/docs/max-instances) Guide for more details.
  *   `minInstances` (*type:* `integer()`, *default:* `nil`) - A lower bound for the number function instances that may coexist at a given time.
  *   `name` (*type:* `String.t`, *default:* `nil`) - A user-defined name of the function. Function names must be unique globally and match pattern `projects/*/locations/*/functions/*`
  *   `network` (*type:* `String.t`, *default:* `nil`) - The VPC Network that this cloud function can connect to. It can be either the fully-qualified URI, or the short name of the network resource. If the short network name is used, the network must belong to the same project. Otherwise, it must belong to a project within the same organization. The format of this field is either `projects/{project}/global/networks/{network}` or `{network}`, where `{project}` is a project id where the network is defined, and `{network}` is the short name of the network. This field is mutually exclusive with `vpc_connector` and will be replaced by it. See [the VPC documentation](https://cloud.google.com/compute/docs/vpc) for more information on connecting Cloud projects.
  *   `runtime` (*type:* `String.t`, *default:* `nil`) - The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function. For a complete list of possible choices, see the [`gcloud` command reference](https://cloud.google.com/sdk/gcloud/reference/functions/deploy#--runtime).
  *   `secretEnvironmentVariables` (*type:* `list(GoogleApi.CloudFunctions.V1.Model.SecretEnvVar.t)`, *default:* `nil`) - Secret environment variables configuration.
  *   `secretVolumes` (*type:* `list(GoogleApi.CloudFunctions.V1.Model.SecretVolume.t)`, *default:* `nil`) - Secret volumes configuration.
  *   `serviceAccountEmail` (*type:* `String.t`, *default:* `nil`) - The email of the function's service account. If empty, defaults to `{project_id}@appspot.gserviceaccount.com`.
  *   `sourceArchiveUrl` (*type:* `String.t`, *default:* `nil`) - The Google Cloud Storage URL, starting with `gs://`, pointing to the zip archive which contains the function.
  *   `sourceRepository` (*type:* `GoogleApi.CloudFunctions.V1.Model.SourceRepository.t`, *default:* `nil`) - **Beta Feature** The source repository where a function is hosted.
  *   `sourceToken` (*type:* `String.t`, *default:* `nil`) - Input only. An identifier for Firebase function sources. Disclaimer: This field is only supported for Firebase function deployments.
  *   `sourceUploadUrl` (*type:* `String.t`, *default:* `nil`) - The Google Cloud Storage signed URL used for source uploading, generated by calling [google.cloud.functions.v1.GenerateUploadUrl]. The signature is validated on write methods (Create, Update) The signature is stripped from the Function object on read methods (Get, List)
  *   `status` (*type:* `String.t`, *default:* `nil`) - Output only. Status of the function deployment.
  *   `timeout` (*type:* `String.t`, *default:* `nil`) - The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The last update timestamp of a Cloud Function.
  *   `versionId` (*type:* `String.t`, *default:* `nil`) - Output only. The version identifier of the Cloud Function. Each deployment attempt results in a new version of a function being created.
  *   `vpcConnector` (*type:* `String.t`, *default:* `nil`) - The VPC Network Connector that this cloud function can connect to. It can be either the fully-qualified URI, or the short name of the network connector resource. The format of this field is `projects/*/locations/*/connectors/*` This field is mutually exclusive with `network` field and will eventually replace it. See [the VPC documentation](https://cloud.google.com/compute/docs/vpc) for more information on connecting Cloud projects.
  *   `vpcConnectorEgressSettings` (*type:* `String.t`, *default:* `nil`) - The egress settings for the connector, controlling what traffic is diverted through it.
  """

  use GoogleApi.Gax.ModelBase

  # Typespec mirroring the field/3 declarations below, one entry per field.
  @type t :: %__MODULE__{
          :availableMemoryMb => integer() | nil,
          :buildEnvironmentVariables => map() | nil,
          :buildId => String.t() | nil,
          :buildName => String.t() | nil,
          :buildWorkerPool => String.t() | nil,
          :description => String.t() | nil,
          :entryPoint => String.t() | nil,
          :environmentVariables => map() | nil,
          :eventTrigger => GoogleApi.CloudFunctions.V1.Model.EventTrigger.t() | nil,
          :httpsTrigger => GoogleApi.CloudFunctions.V1.Model.HttpsTrigger.t() | nil,
          :ingressSettings => String.t() | nil,
          :labels => map() | nil,
          :maxInstances => integer() | nil,
          :minInstances => integer() | nil,
          :name => String.t() | nil,
          :network => String.t() | nil,
          :runtime => String.t() | nil,
          :secretEnvironmentVariables =>
            list(GoogleApi.CloudFunctions.V1.Model.SecretEnvVar.t()) | nil,
          :secretVolumes => list(GoogleApi.CloudFunctions.V1.Model.SecretVolume.t()) | nil,
          :serviceAccountEmail => String.t() | nil,
          :sourceArchiveUrl => String.t() | nil,
          :sourceRepository => GoogleApi.CloudFunctions.V1.Model.SourceRepository.t() | nil,
          :sourceToken => String.t() | nil,
          :sourceUploadUrl => String.t() | nil,
          :status => String.t() | nil,
          :timeout => String.t() | nil,
          :updateTime => DateTime.t() | nil,
          :versionId => String.t() | nil,
          :vpcConnector => String.t() | nil,
          :vpcConnectorEgressSettings => String.t() | nil
        }

  # field/2,3 is provided by GoogleApi.Gax.ModelBase and wires up
  # JSON (de)serialization for each attribute.
  field(:availableMemoryMb)
  field(:buildEnvironmentVariables, type: :map)
  field(:buildId)
  field(:buildName)
  field(:buildWorkerPool)
  field(:description)
  field(:entryPoint)
  field(:environmentVariables, type: :map)
  field(:eventTrigger, as: GoogleApi.CloudFunctions.V1.Model.EventTrigger)
  field(:httpsTrigger, as: GoogleApi.CloudFunctions.V1.Model.HttpsTrigger)
  field(:ingressSettings)
  field(:labels, type: :map)
  field(:maxInstances)
  field(:minInstances)
  field(:name)
  field(:network)
  field(:runtime)

  field(:secretEnvironmentVariables,
    as: GoogleApi.CloudFunctions.V1.Model.SecretEnvVar,
    type: :list
  )

  field(:secretVolumes, as: GoogleApi.CloudFunctions.V1.Model.SecretVolume, type: :list)
  field(:serviceAccountEmail)
  field(:sourceArchiveUrl)
  field(:sourceRepository, as: GoogleApi.CloudFunctions.V1.Model.SourceRepository)
  field(:sourceToken)
  field(:sourceUploadUrl)
  field(:status)
  field(:timeout)
  field(:updateTime, as: DateTime)
  field(:versionId)
  field(:vpcConnector)
  field(:vpcConnectorEgressSettings)
end
defimpl Poison.Decoder, for: GoogleApi.CloudFunctions.V1.Model.CloudFunction do
  # Delegates to the model's decode/2 (provided via `use
  # GoogleApi.Gax.ModelBase` — presumably; defined outside this file).
  def decode(value, options) do
    GoogleApi.CloudFunctions.V1.Model.CloudFunction.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.CloudFunctions.V1.Model.CloudFunction do
  # Encoding is shared across generated models via GoogleApi.Gax.ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 80.235714 | 763 | 0.713345 |
ffb28824529141b47e393a0ac4d6f96bb8f37321 | 1,787 | ex | Elixir | lib/stackexchange_crawler_web.ex | chittolina/stackexchange_crawler | e0ab27eb718bbf62b50ff3b76abefe947ec3fea8 | [
"MIT"
] | null | null | null | lib/stackexchange_crawler_web.ex | chittolina/stackexchange_crawler | e0ab27eb718bbf62b50ff3b76abefe947ec3fea8 | [
"MIT"
] | 1 | 2022-02-10T19:50:54.000Z | 2022-02-10T19:50:54.000Z | lib/stackexchange_crawler_web.ex | chittolina/stackexchange_crawler | e0ab27eb718bbf62b50ff3b76abefe947ec3fea8 | [
"MIT"
] | null | null | null | defmodule StackexchangeCrawlerWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use StackexchangeCrawlerWeb, :controller
use StackexchangeCrawlerWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: StackexchangeCrawlerWeb
import Plug.Conn
import StackexchangeCrawlerWeb.Gettext
alias StackexchangeCrawlerWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/stackexchange_crawler_web/templates",
namespace: StackexchangeCrawlerWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import StackexchangeCrawlerWeb.ErrorHelpers
import StackexchangeCrawlerWeb.Gettext
alias StackexchangeCrawlerWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import StackexchangeCrawlerWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 25.528571 | 83 | 0.716284 |
ffb29fb6187ef77d37582e161af9d61fa5a4254e | 6,037 | ex | Elixir | lib/ratchet_wrench/model.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 8 | 2020-05-29T03:22:02.000Z | 2022-03-18T12:16:49.000Z | lib/ratchet_wrench/model.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 4 | 2020-07-06T08:11:18.000Z | 2020-08-14T11:10:10.000Z | lib/ratchet_wrench/model.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 4 | 2020-05-26T02:47:00.000Z | 2021-05-03T15:26:12.000Z | defmodule RatchetWrench.Model do
@moduledoc """
Define struct module of record in database.
## Examples
```elixir
defmodule Data do
use RatchetWrench.Model
schema do
uuid :data_id
pk: [:data_id]
attributes data_id: {"STRING", nil},
string: {"STRING", ""},
bool: {"BOOL", nil },
int: {"INT64", nil},
float: {"FLOAT64", nil},
date: {"DATE", nil},
time_stamp: {"TIMESTAMP", nil}
end
end
```
"""
  defmacro __using__(_) do
    quote do
      # Derive the default table name from the using module's last
      # segment: e.g. `MyApp.UserData` -> "user_datas" (underscored,
      # downcased, pluralized by Inflex).
      table_name =
        __MODULE__
        |> Atom.to_string()
        |> String.split(".")
        |> List.last()
        |> Macro.underscore()
        |> String.downcase()
        |> Inflex.pluralize()

      default_table_name = "#{table_name}"
      Module.put_attribute(__MODULE__, :table_name, default_table_name)

      # Register the schema attributes. Only :attributes accumulates
      # (one entry per column declared in the schema block).
      Module.register_attribute(__MODULE__, :uuid, accumulate: false)
      Module.register_attribute(__MODULE__, :pk, accumulate: false)
      Module.register_attribute(__MODULE__, :interleave, accumulate: false)
      Module.register_attribute(__MODULE__, :attributes, accumulate: true)

      import RatchetWrench.Model
    end
  end
  @doc """
  Entry point for the `schema do ... end` DSL; expands via `do_schema/1`.
  """
  defmacro schema(do: block) do
    do_schema(block)
  end
  # Expands the schema block: runs the user's declarations, normalizes
  # the collected attributes, then validates them and generates the
  # struct plus reflection helpers on the using module.
  defp do_schema(block) do
    quote do
      unquote(block)

      table_name = Module.get_attribute(__ENV__.module, :table_name)
      Module.put_attribute(__ENV__.module, :table_name, table_name)

      uuid = Module.get_attribute(__ENV__.module, :uuid)
      Module.put_attribute(__ENV__.module, :uuid, uuid)

      pk = Module.get_attribute(__ENV__.module, :pk)
      Module.put_attribute(__ENV__.module, :pk, pk)

      # :interleave is optional; default to an empty list when absent.
      interleave = Module.get_attribute(__ENV__.module, :interleave)

      if interleave == nil do
        interleave = []
        Module.put_attribute(__ENV__.module, :interleave, interleave)
      else
        Module.put_attribute(__ENV__.module, :interleave, interleave)
      end

      # Validation raises at compile time on a malformed schema.
      Module.eval_quoted(__ENV__, [
        RatchetWrench.Model.__defstruct__(__ENV__.module),
        RatchetWrench.Model.__valid_define_uuid__!(__ENV__.module),
        RatchetWrench.Model.__valid_define_pk__!(__ENV__.module),
        RatchetWrench.Model.__valid_define_interleave__!(__ENV__.module),
        RatchetWrench.Model.__def_helper_funcs__(__ENV__.module)
      ])
    end
  end
def __defstruct__(target) do
quote bind_quoted: [target: target] do
attributes = Module.get_attribute(target, :attributes)
fields = attributes |> Enum.map(fn {name, {_type, default}} -> {name, default} end)
defstruct fields
end
end
def __valid_define_uuid__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
uuid = Module.get_attribute(mod, :uuid)
unless defined_column?(attributes, uuid) do
raise "Not define uuid in #{mod} module schema"
end
end
def __valid_define_pk__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
pk = Module.get_attribute(mod, :pk)
if pk == nil do
raise "Must set pk in #{mod} module schema"
end
result =
Enum.map(pk, fn key ->
defined_column?(attributes, key)
end)
|> Enum.all?()
if result == false do
raise "Not define colum name in #{mod} module schema pk"
end
end
def __valid_define_interleave__!(mod) do
attributes = Module.get_attribute(mod, :attributes)
interleave = Module.get_attribute(mod, :interleave)
result =
Enum.map(interleave, fn key ->
defined_column?(attributes, key)
end)
|> Enum.all?()
if result == false do
raise "Not define colum name in #{mod} module schema interleave"
end
end
def defined_column?(attributes, target) do
result =
attributes
|> Enum.map(fn {name, {_type, _default}} -> "#{name}" == "#{target}" end)
|> Enum.any?()
if result == false do
false
else
true
end
end
def __def_helper_funcs__(mod) do
table_name = Module.get_attribute(mod, :table_name)
attributes = Module.get_attribute(mod, :attributes)
uuid = Module.get_attribute(mod, :uuid)
pk = Module.get_attribute(mod, :pk)
interleave = Module.get_attribute(mod, :interleave)
quote do
def __table_name__, do: unquote(table_name)
def __attributes__, do: unquote(attributes)
def __uuid__, do: unquote(uuid)
def __pk__, do: unquote(pk)
def __interleave__, do: unquote(interleave)
end
end
defmacro table_name(table_name) do
quote bind_quoted: [table_name: table_name] do
RatchetWrench.Model.__table_name__(__MODULE__, table_name)
end
end
def __table_name__(mod, table_name) do
Module.put_attribute(mod, :table_name, table_name)
end
defmacro attributes(decl) do
{list_of_attrs, _} = Code.eval_quoted(decl)
for attr <- list_of_attrs do
quote do: attribute([unquote(attr)])
end
end
defmacro attribute(decl) do
quote bind_quoted: [decl: decl] do
{name, type, default} =
case decl do
[{name, {type, default}}] -> {name, type, default}
end
RatchetWrench.Model.__attribute__(__MODULE__, name, type, default)
end
end
def __attribute__(mod, name, type, default) do
Module.put_attribute(mod, :attributes, {name, {type, default}})
end
defmacro uuid(uuid) do
quote bind_quoted: [uuid: uuid] do
RatchetWrench.Model.__uuid__(__MODULE__, uuid)
end
end
def __uuid__(mod, uuid) do
Module.put_attribute(mod, :uuid, uuid)
end
defmacro pk(pk) do
quote bind_quoted: [pk: pk] do
RatchetWrench.Model.__pk__(__MODULE__, pk)
end
end
def __pk__(mod, pk) do
Module.put_attribute(mod, :pk, pk)
end
defmacro interleave(interleave) do
quote bind_quoted: [interleave: interleave] do
RatchetWrench.Model.__interleave__(__MODULE__, interleave)
end
end
def __interleave__(mod, interleave) do
Module.put_attribute(mod, :interleave, interleave)
end
end
| 26.247826 | 89 | 0.654133 |
ffb2af4fb19dba73a0e62d2254fdc35427ef110e | 872 | ex | Elixir | lib/ecto/storage.ex | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
] | 1 | 2016-08-15T21:23:28.000Z | 2016-08-15T21:23:28.000Z | lib/ecto/storage.ex | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
] | null | null | null | lib/ecto/storage.ex | yrashk/ecto | 1462d5ad4cbb7bf74c292ec405852bc196808daf | [
"Apache-2.0"
defmodule Ecto.Storage do
  @moduledoc """
  Convenience functions around the data store of a repository.
  """

  @doc """
  Create the storage in the data store and return `:ok` if it was created
  successfully.

  Returns `{:error, :already_up}` if the storage has already been created or
  `{:error, term}` in case anything else goes wrong.
  """
  @spec up(Ecto.Repo.t) :: :ok | {:error, :already_up} | {:error, term}
  def up(repo) do
    # Delegate to the adapter configured on the repo, passing its config.
    adapter = repo.adapter
    adapter.storage_up(repo.conf)
  end

  @doc """
  Drop the storage in the data store and return `:ok` if it was dropped
  successfully.

  Returns `{:error, :already_down}` if the storage has already been dropped or
  `{:error, term}` in case anything else goes wrong.
  """
  @spec down(Ecto.Repo.t) :: :ok | {:error, :already_down} | {:error, term}
  def down(repo) do
    adapter = repo.adapter
    adapter.storage_down(repo.conf)
  end
end
| 29.066667 | 78 | 0.673165 |
ffb2b6448f890c18a76e7c3393797441a10e14f5 | 778 | ex | Elixir | lib/fawkes/handlers/help.ex | QuinnWilton/fawkes | c6d445d46b8d7bc6e0f44e9366bee8ce0441305c | [
"MIT"
] | 40 | 2020-02-24T16:21:56.000Z | 2021-12-08T23:25:41.000Z | lib/fawkes/handlers/help.ex | QuinnWilton/fawkes | c6d445d46b8d7bc6e0f44e9366bee8ce0441305c | [
"MIT"
] | 50 | 2020-03-27T02:18:17.000Z | 2022-03-23T10:38:35.000Z | lib/fawkes/handlers/help.ex | QuinnWilton/fawkes | c6d445d46b8d7bc6e0f44e9366bee8ce0441305c | [
"MIT"
defmodule Fawkes.Handlers.Help do
  @moduledoc """
  A handler for surfacing help messages about your bots handlers.

  This handler is added automatically to your bot and you do not need to add
  it yourself. This documentation is only here for posterity.
  """
  use Fawkes.Listener

  # Builds the aggregated help text at startup: each handler's help string is
  # split into lines, trimmed, blanks are dropped, and the leading "fawkes "
  # placeholder is replaced with the configured bot alias (or "name" when no
  # alias is set). The result becomes this listener's state.
  def init(bot, handlers) do
    alias_name = bot.bot_alias || "name"

    lines =
      for handler <- handlers,
          line <- String.split(handler.help(), "\n"),
          trimmed = String.trim(line),
          trimmed != "" do
        String.replace(trimmed, ~r/^fawkes /, alias_name)
      end

    {:ok, Enum.join(lines, "\n")}
  end

  @help """
  fawkes help - Prints this help message
  """
  respond(~r/help/, fn _, event, help_msg ->
    code(event, help_msg)
    help_msg
  end)
end
| 25.096774 | 78 | 0.613111 |
ffb2f05102b412f31eb9a46b29365e6578e07e4d | 1,126 | ex | Elixir | deps/makeup/lib/makeup/styles/html/pygments/borland.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/borland.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null | deps/makeup/lib/makeup/styles/html/pygments/borland.ex | arduino-man/fona_modern | 61845bbbbc46a61a50e59a97c68709f2722078a6 | [
"MIT"
] | null | null | null |
defmodule Makeup.Styles.HTML.BorlandStyle do
@moduledoc false
@styles %{
:error => "bg:#e3d2d2 #a61717",
:keyword => "bold #000080",
:name_attribute => "#FF0000",
:name_tag => "bold #000080",
:string => "#0000FF",
:string_char => "#800080",
:number => "#0000FF",
:operator_word => "bold",
:comment => "italic #008800",
:comment_preproc => "noitalic #008080",
:comment_special => "noitalic bold",
:generic_deleted => "bg:#ffdddd #000000",
:generic_emph => "italic",
:generic_error => "#aa0000",
:generic_heading => "#999999",
:generic_inserted => "bg:#ddffdd #000000",
:generic_output => "#888888",
:generic_prompt => "#555555",
:generic_strong => "bold",
:generic_subheading => "#aaaaaa",
:generic_traceback => "#aa0000"
}
alias Makeup.Styles.HTML.Style
@style_struct Style.make_style(
short_name: "borland",
long_name: "Borland Style",
background_color: "#ffffff",
highlight_color: "#ffffcc",
styles: @styles)
def style() do
@style_struct
end
end | 26.809524 | 47 | 0.587922 |
ffb30d8b8ef3896a92f4353b620c2b6ca1f3a093 | 3,477 | exs | Elixir | test/unit/confex/type_test.exs | circles-learning-labs/confex | 525ad3144293a0b7d287b39013fcaf5b65bfdffa | [
"MIT"
] | 299 | 2016-09-08T19:15:02.000Z | 2022-03-16T07:19:56.000Z | test/unit/confex/type_test.exs | circles-learning-labs/confex | 525ad3144293a0b7d287b39013fcaf5b65bfdffa | [
"MIT"
] | 54 | 2016-09-13T20:55:30.000Z | 2021-04-01T14:31:42.000Z | test/unit/confex/type_test.exs | circles-learning-labs/confex | 525ad3144293a0b7d287b39013fcaf5b65bfdffa | [
"MIT"
defmodule Confex.TypeTest do
  use ExUnit.Case, async: true
  alias Confex.Type
  doctest Confex.Type

  test "cast nil" do
    # `nil` short-circuits casting for every supported type, including MFA tuples.
    types = [:string, :integer, :float, :boolean, :atom, :module, :list, {M, :f, []}]

    Enum.each(types, fn type ->
      assert {:ok, nil} == Type.cast(nil, type)
    end)
  end

  test "cast string" do
    assert {:ok, "my_string"} == Type.cast("my_string", :string)
  end

  test "cast module" do
    assert {:ok, MyModule} == Type.cast("MyModule", :module)
    assert {:ok, :"Elixir.___@@*@#"} == Type.cast("___@@*@#", :module)
  end

  test "cast integer" do
    assert {:ok, 42} == Type.cast("42", :integer)
    assert {:ok, 0} == Type.cast("0", :integer)

    assert {:error, ~s/can not cast "0.5" to Integer, result contains binary remainder .5/} ==
             Type.cast("0.5", :integer)

    assert {:error, ~s/can not cast "42ab" to Integer, result contains binary remainder ab/} ==
             Type.cast("42ab", :integer)

    assert {:error, ~s/can not cast "abc" to Integer/} == Type.cast("abc", :integer)
  end

  test "cast float" do
    assert {:ok, 42.5} == Type.cast("42.5", :float)
    assert {:ok, 0} == Type.cast("0", :float)
    assert {:ok, 7} == Type.cast("7", :float)

    assert {:error, ~s/can not cast "42.5ab" to Float, result contains binary remainder ab/} ==
             Type.cast("42.5ab", :float)

    assert {:error, ~s/can not cast "abc" to Float/} == Type.cast("abc", :float)
  end

  test "cast atom" do
    assert {:ok, :my_atom} == Type.cast("my_atom", :atom)
    assert {:ok, :Myatom} == Type.cast("Myatom", :atom)
    assert {:ok, :"___@@*@#"} == Type.cast("___@@*@#", :atom)
  end

  test "cast boolean" do
    for truthy <- ["true", "tRue", "1", "yes"] do
      assert {:ok, true} == Type.cast(truthy, :boolean)
    end

    for falsy <- ["false", "faLse", "0", "no"] do
      assert {:ok, false} == Type.cast(falsy, :boolean)
    end

    reason = ~s/can not cast "unknown" to boolean, expected values are 'true', 'false', '1', '0', 'yes' or 'no'/
    assert {:error, reason} == Type.cast("unknown", :boolean)
  end

  test "cast list" do
    assert {:ok, ["hello"]} == Type.cast("hello", :list)
    assert {:ok, ["1", "2", "3"]} == Type.cast("1,2,3", :list)
    assert {:ok, ["a", "b", "C"]} == Type.cast("a,b,C", :list)
    assert {:ok, ["a", "b", "C"]} == Type.cast(" a, b, C ", :list)
    # A trailing comma yields a trailing empty element.
    assert {:ok, ["a", "b", "C", ""]} == Type.cast(" a, b, C, ", :list)
  end

  test "cast charlist" do
    assert {:ok, 'my_atom'} == Type.cast("my_atom", :charlist)
    assert {:ok, 'Myatom'} == Type.cast("Myatom", :charlist)
    assert {:ok, '___@@*@-'} == Type.cast("___@@*@-", :charlist)
  end

  test "cast with {m,f,a}" do
    assert {:ok, "hello"} == Type.cast("hello", {__MODULE__, :do_cast, [:ok]})
    assert {:error, "generic reason"} == Type.cast("hello", {__MODULE__, :do_cast, [:error]})

    reason =
      "expected `Elixir.Confex.TypeTest.do_cast/2` to return either " <>
        "`{:ok, value}` or `{:error, reason}` tuple, got: `:other_return`"

    assert {:error, reason} == Type.cast("hello", {__MODULE__, :do_cast, [:other_return]})
  end

  # Helpers invoked through the MFA casting path above.
  def do_cast(value, :ok), do: {:ok, value}
  def do_cast(_value, :error), do: {:error, "generic reason"}
  def do_cast(_value, _), do: :other_return
end
| 36.989362 | 112 | 0.577509 |
ffb31c8cf588a8928154b2811b82365437ea987b | 646 | ex | Elixir | lib/towwwer/websites/report.ex | juhalehtonen/towwwer | 552fe57e93adc9c521a4c6a8ef550b84efec32ca | [
"MIT"
] | 1 | 2019-05-03T13:39:40.000Z | 2019-05-03T13:39:40.000Z | lib/towwwer/websites/report.ex | juhalehtonen/towwwer | 552fe57e93adc9c521a4c6a8ef550b84efec32ca | [
"MIT"
] | 13 | 2019-03-27T05:40:41.000Z | 2019-05-24T10:43:31.000Z | lib/towwwer/websites/report.ex | juhalehtonen/towwwer | 552fe57e93adc9c521a4c6a8ef550b84efec32ca | [
"MIT"
defmodule Towwwer.Websites.Report do
  @moduledoc false

  use Ecto.Schema
  import Ecto.Changeset

  alias Towwwer.Websites.Monitor

  # Fields that may be cast directly from external params.
  @cast_fields [:data, :mobile_data, :wpscan_data]

  schema "reports" do
    field(:data, :map)
    field(:mobile_data, :map)
    field(:wpscan_data, :map)
    belongs_to(:monitor, Monitor)
    timestamps()
  end

  @doc false
  # Clause order matters: the first clause handles attrs that carry a
  # `:monitor` key and associates it; the fallback marks the changeset
  # invalid instead.
  def changeset(report, %{monitor: _monitor} = attrs) do
    report
    |> cast(attrs, @cast_fields)
    |> put_assoc(:monitor, attrs.monitor)
  end

  def changeset(report, attrs) do
    report
    |> cast(attrs, @cast_fields)
    |> add_error(:no_monitor, "No monitor provided")
  end
end
| 22.275862 | 56 | 0.673375 |
ffb326ed600564c413febd15057bb19e504e4ed2 | 871 | exs | Elixir | kousa/test/beef/scheduled_room_test.exs | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 9 | 2021-03-17T03:56:18.000Z | 2021-09-24T22:45:14.000Z | kousa/test/beef/scheduled_room_test.exs | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/test/beef/scheduled_room_test.exs | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
defmodule Kousa.Beef.ScheduledRoomsTest do
  # allow tests to run in parallel
  use ExUnit.Case, async: true
  use KousaTest.Support.EctoSandbox

  alias KousaTest.Support.Factory
  alias Beef.Repo
  alias Beef.Schemas.User
  alias Beef.Schemas.ScheduledRoom
  alias Beef.ScheduledRooms

  # Seeds a persisted user into the test context under the :user key.
  defp create_user(_context) do
    {:ok, user: Factory.create(User)}
  end

  describe "you can create a scheduled room" do
    setup :create_user

    # Base insert params; :scheduledFor is one day in the future, rendered
    # as an ISO-8601 ("Z") timestamp string.
    @scheduled_room_input %{
      name: "test scheduled room",
      description: "",
      numAttendees: 0,
      scheduledFor:
        DateTime.utc_now()
        |> Timex.shift(days: 1)
        |> Timex.format!("{ISO:Extended:Z}")
    }

    test "with ISO date", %{user: user} do
      input = Map.put(@scheduled_room_input, :creatorId, user.id)
      {:ok, scheduled_room} = ScheduledRooms.insert(input)

      # The freshly inserted row should be the only one in the table.
      assert [^scheduled_room] = Repo.all(ScheduledRoom)
    end
  end
end
| 24.194444 | 92 | 0.667049 |
ffb336a66c46bb1ea8cc2218f5c1b28ea6cff544 | 1,701 | ex | Elixir | clients/firebase_hosting/lib/google_api/firebase_hosting/v1beta1/model/cert_http_challenge.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firebase_hosting/lib/google_api/firebase_hosting/v1beta1/model/cert_http_challenge.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firebase_hosting/lib/google_api/firebase_hosting/v1beta1/model/cert_http_challenge.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseHosting.V1beta1.Model.CertHttpChallenge do
@moduledoc """
Represents an HTTP certificate challenge.
## Attributes
* `path` (*type:* `String.t`, *default:* `nil`) - The URL path on which to serve the specified token to satisfy the
certificate challenge.
* `token` (*type:* `String.t`, *default:* `nil`) - The token to serve at the specified URL path to satisfy the certificate
challenge.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:path => String.t(),
:token => String.t()
}
field(:path)
field(:token)
end
defimpl Poison.Decoder, for: GoogleApi.FirebaseHosting.V1beta1.Model.CertHttpChallenge do
def decode(value, options) do
GoogleApi.FirebaseHosting.V1beta1.Model.CertHttpChallenge.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.FirebaseHosting.V1beta1.Model.CertHttpChallenge do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.711538 | 126 | 0.728983 |
ffb349781ee3568e61b03b48a04213b968b152fb | 166 | exs | Elixir | test/controllers/page_controller_test.exs | marcaddeo/marc.cx | 2c858e640c2e19608495be8bf96e0a89b5e58ac7 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | marcaddeo/marc.cx | 2c858e640c2e19608495be8bf96e0a89b5e58ac7 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | marcaddeo/marc.cx | 2c858e640c2e19608495be8bf96e0a89b5e58ac7 | [
"MIT"
] | null | null | null | defmodule MarcCx.PageControllerTest do
use MarcCx.ConnCase
test "GET /" do
conn = get conn(), "/"
assert html_response(conn, 200) =~ "marc.cx"
end
end
| 18.444444 | 48 | 0.662651 |
ffb367d7c2d139054e5485672945604eb28cc1f5 | 1,808 | exs | Elixir | test/subscription_handler_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | test/subscription_handler_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | test/subscription_handler_test.exs | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | defmodule Poxa.SubscriptionHandlerTest do
  use ExUnit.Case, async: true
  alias Poxa.Event
  alias Poxa.Channel
  use Mimic
  import Poxa.SubscriptionHandler
  # Stub all Poxa.Event functions by default so unexpected notifications don't
  # crash; individual tests tighten this with expect/1 or reject/1.
  setup do
    stub(Event)
    :ok
  end
  test "disconnected" do
    # One occupied? check per disconnected channel: only the now-vacated
    # channel_1 triggers a :channel_vacated notification.
    # NOTE(review): relies on Mimic consuming stacked `expect`s for the same
    # function in call order — confirm against the Mimic version in use.
    expect(Channel, :occupied?, fn "channel_1" -> false end)
    expect(Channel, :occupied?, fn "channel_2" -> true end)
    expect(Event, :notify, fn :channel_vacated, %{channel: "channel_1"} -> :ok end)
    assert {:ok, []} == handle_event(%{event: :disconnected, channels: ~w(channel_1 channel_2), socket_id: "socket_id"}, [])
  end
  test "subscribed sends channel_occupied event on first subscription" do
    # Subscription count of 1 means this was the first subscriber.
    expect(Channel, :subscription_count, fn "my_channel" -> 1 end)
    expect(Event, :notify, fn :channel_occupied, %{channel: "my_channel"} -> :ok end)
    assert {:ok, []} == handle_event(%{event: :subscribed, channel: "my_channel", socket_id: "socket_id"}, [])
  end
  test "subscribed does not send channel_occupied event other than first subscription" do
    # Count > 1: the channel was already occupied, so no notification at all.
    expect(Channel, :subscription_count, fn "my_channel" -> 2 end)
    reject(&Event.notify/2)
    assert {:ok, []} == handle_event(%{event: :subscribed, channel: "my_channel", socket_id: "socket_id"}, [])
  end
  test "unsubscribed sends channel_vacated event" do
    # Channel empty after unsubscribe -> :channel_vacated is emitted.
    expect(Channel, :occupied?, fn "my_channel" -> false end)
    expect(Event, :notify, fn :channel_vacated, %{channel: "my_channel"} -> :ok end)
    assert {:ok, []} == handle_event(%{event: :unsubscribed, channel: "my_channel", socket_id: "socket_id"}, [])
  end
  test "unsubscribed does not send channel_vacated event" do
    # Channel still occupied -> no notification may be emitted.
    expect(Channel, :occupied?, fn "my_channel" -> true end)
    reject(&Event.notify/2)
    assert {:ok, []} == handle_event(%{event: :unsubscribed, channel: "my_channel", socket_id: "socket_id"}, [])
  end
end
| 38.468085 | 124 | 0.683075 |
ffb3adf3737d3ee95ecc2258c9186cf999070011 | 5,729 | ex | Elixir | apps/ex_wire/lib/ex_wire/packet/capability/par/block_bodies.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152 | 2018-10-27T04:52:03.000Z | 2022-03-26T10:34:00.000Z | apps/ex_wire/lib/ex_wire/packet/capability/par/block_bodies.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270 | 2018-04-14T07:34:57.000Z | 2018-10-25T18:10:45.000Z | apps/ex_wire/lib/ex_wire/packet/capability/par/block_bodies.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
defmodule ExWire.Packet.Capability.Par.BlockBodies do
  @moduledoc """
  Par Wire Packet for getting block bodies from a peer.

  ```
  **BlockBodies** [`+0x06`, [`transactions_0`, `uncles_0`] , ...]

  Reply to `GetBlockBodies`. The items in the list (following the message ID) are
  some of the blocks, minus the header, in the format described in the main Ethereum
  specification, previously asked for in a `GetBlockBodies` message. This may
  validly contain no items if no blocks were able to be returned for the
  `GetBlockBodies` query.
  ```
  """
  require Logger

  alias ExWire.Struct.Block

  @behaviour ExWire.Packet

  @type t :: %__MODULE__{
          blocks: [Block.t()]
        }

  defstruct [
    :blocks
  ]

  @doc """
  Builds a `BlockBodies` packet wrapping the given block structs.
  """
  @spec new([Block.t()]) :: t()
  def new(block_structs) do
    %__MODULE__{blocks: block_structs}
  end

  @doc """
  Returns the relative message id offset for this message.
  This will help determine what its message ID is relative to other Packets in the same Capability.
  """
  @impl true
  @spec message_id_offset() :: 6
  def message_id_offset do
    0x06
  end

  @doc """
  Given a BlockBodies packet, serializes for transport over Eth Wire Protocol.

  ## Examples

      iex> %ExWire.Packet.Capability.Par.BlockBodies{
      ...>   blocks: [
      ...>     %ExWire.Struct.Block{transactions_rlp: [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]},
      ...>     %ExWire.Struct.Block{transactions_rlp: [[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]}
      ...>   ]
      ...> }
      ...> |> ExWire.Packet.Capability.Par.BlockBodies.serialize()
      [
        [
          [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
          [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]
        ],
        [
          [[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
          [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]
        ]
      ]
  """
  @impl true
  @spec serialize(t) :: ExRLP.t()
  def serialize(packet = %__MODULE__{}) do
    Enum.map(packet.blocks, &Block.serialize/1)
  end

  @doc """
  Given an RLP-encoded BlockBodies packet from Eth Wire Protocol, decodes into
  a `BlockBodies` struct.

  ## Examples

      iex> ExWire.Packet.Capability.Par.BlockBodies.deserialize([ [[[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]], [[[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]], [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]]] ])
      %ExWire.Packet.Capability.Par.BlockBodies{
        blocks: [
          %ExWire.Struct.Block{
            transactions_rlp: [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
            transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
            ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]],
            ommers: [%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}]
          },
          %ExWire.Struct.Block{
            transactions_rlp: [[<<6>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
            transactions: [%Blockchain.Transaction{nonce: 6, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
            ommers_rlp: [[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>]],
            ommers: [%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}]
          }
        ]
      }
  """
  @impl true
  @spec deserialize(ExRLP.t()) :: t
  def deserialize(rlp) do
    %__MODULE__{blocks: Enum.map(rlp, &Block.deserialize/1)}
  end

  @doc """
  Handles a BlockBodies message. This is when we have received
  a given set of blocks back from a peer.

  ## Examples

      iex> %ExWire.Packet.Capability.Par.BlockBodies{blocks: []}
      ...> |> ExWire.Packet.Capability.Par.BlockBodies.handle()
      :ok
  """
  @impl true
  @spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
  def handle(packet = %__MODULE__{}) do
    :ok = Logger.info("[Packet] Peer sent #{Enum.count(packet.blocks)} block(s).")
    :ok
  end
end
| 44.757813 | 517 | 0.498342 |
ffb3de4dcbed7946458d7c8930e3bce1d46de390 | 4,977 | ex | Elixir | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_defaults.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_defaults.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_defaults.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDefaults do
@moduledoc """
ComputeEngineTargetDefaults is a collection of details for creating a VM in a target Compute Engine project.
## Attributes
* `additionalLicenses` (*type:* `list(String.t)`, *default:* `nil`) - Additional licenses to assign to the VM.
* `appliedLicense` (*type:* `GoogleApi.VMMigration.V1.Model.AppliedLicense.t`, *default:* `nil`) - Output only. The OS license returned from the adaptation module report.
* `bootOption` (*type:* `String.t`, *default:* `nil`) - Output only. The VM Boot Option, as set in the source vm.
* `computeScheduling` (*type:* `GoogleApi.VMMigration.V1.Model.ComputeScheduling.t`, *default:* `nil`) - Compute instance scheduling information (if empty default is used).
* `diskType` (*type:* `String.t`, *default:* `nil`) - The disk type to use in the VM.
* `labels` (*type:* `map()`, *default:* `nil`) - A map of labels to associate with the VM.
* `licenseType` (*type:* `String.t`, *default:* `nil`) - The license type to use in OS adaptation.
* `machineType` (*type:* `String.t`, *default:* `nil`) - The machine type to create the VM with.
* `machineTypeSeries` (*type:* `String.t`, *default:* `nil`) - The machine type series to create the VM with.
* `metadata` (*type:* `map()`, *default:* `nil`) - The metadata key/value pairs to assign to the VM.
* `networkInterfaces` (*type:* `list(GoogleApi.VMMigration.V1.Model.NetworkInterface.t)`, *default:* `nil`) - List of NICs connected to this VM.
* `networkTags` (*type:* `list(String.t)`, *default:* `nil`) - A map of network tags to associate with the VM.
* `secureBoot` (*type:* `boolean()`, *default:* `nil`) - Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
* `serviceAccount` (*type:* `String.t`, *default:* `nil`) - The service account to associate the VM with.
* `targetProject` (*type:* `String.t`, *default:* `nil`) - The full path of the resource of type TargetProject which represents the Compute Engine project in which to create this VM.
* `vmName` (*type:* `String.t`, *default:* `nil`) - The name of the VM to create.
* `zone` (*type:* `String.t`, *default:* `nil`) - The zone in which to create the VM.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:additionalLicenses => list(String.t()) | nil,
:appliedLicense => GoogleApi.VMMigration.V1.Model.AppliedLicense.t() | nil,
:bootOption => String.t() | nil,
:computeScheduling => GoogleApi.VMMigration.V1.Model.ComputeScheduling.t() | nil,
:diskType => String.t() | nil,
:labels => map() | nil,
:licenseType => String.t() | nil,
:machineType => String.t() | nil,
:machineTypeSeries => String.t() | nil,
:metadata => map() | nil,
:networkInterfaces => list(GoogleApi.VMMigration.V1.Model.NetworkInterface.t()) | nil,
:networkTags => list(String.t()) | nil,
:secureBoot => boolean() | nil,
:serviceAccount => String.t() | nil,
:targetProject => String.t() | nil,
:vmName => String.t() | nil,
:zone => String.t() | nil
}
field(:additionalLicenses, type: :list)
field(:appliedLicense, as: GoogleApi.VMMigration.V1.Model.AppliedLicense)
field(:bootOption)
field(:computeScheduling, as: GoogleApi.VMMigration.V1.Model.ComputeScheduling)
field(:diskType)
field(:labels, type: :map)
field(:licenseType)
field(:machineType)
field(:machineTypeSeries)
field(:metadata, type: :map)
field(:networkInterfaces, as: GoogleApi.VMMigration.V1.Model.NetworkInterface, type: :list)
field(:networkTags, type: :list)
field(:secureBoot)
field(:serviceAccount)
field(:targetProject)
field(:vmName)
field(:zone)
end
defimpl Poison.Decoder, for: GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDefaults do
def decode(value, options) do
GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDefaults.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDefaults do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 52.389474 | 186 | 0.681133 |
ffb3ebe2b3d1acdc42314e24f387a504d43507fb | 849 | ex | Elixir | lib/chess_app/application.ex | leobessa/exchess | 289819d183f3001dddf56810c36298fa669c3a06 | [
"MIT"
] | 3 | 2017-06-02T20:47:07.000Z | 2018-05-25T11:17:12.000Z | lib/chess_app/application.ex | leobessa/exchess | 289819d183f3001dddf56810c36298fa669c3a06 | [
"MIT"
] | null | null | null | lib/chess_app/application.ex | leobessa/exchess | 289819d183f3001dddf56810c36298fa669c3a06 | [
"MIT"
defmodule ChessApp.Application do
  use Application

  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    import Supervisor.Spec

    # Define workers and child supervisors to be supervised.
    # FIX: removed the trailing comma after the last live child — Elixir does
    # not allow trailing commas in lists, so the original did not compile.
    children = [
      # Start the Ecto repository
      supervisor(ChessApp.Repo, []),
      # Start the endpoint when the application starts
      supervisor(ChessApp.Web.Endpoint, [])
      # Start your own worker by calling: ChessApp.Worker.start_link(arg1, arg2, arg3)
      # worker(ChessApp.Worker, [arg1, arg2, arg3]),
    ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: ChessApp.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 33.96 | 86 | 0.709069 |
ffb443bc753a3eaf422f671638fcd21d1846c3c1 | 571 | ex | Elixir | apps/ewallet_db/lib/ewallet_db/user_query.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_db/lib/ewallet_db/user_query.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/lib/ewallet_db/user_query.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletDB.UserQuery do
  @moduledoc """
  Helper functions to manipulate a `EWalletDB.User`'s query.
  """
  import Ecto.Query
  alias EWalletDB.{Membership, User}
  @doc """
  Scopes the given user query to users with one or more membership only.
  If a `queryable` is not given, it automatically creates a new User query.
  """
  def where_has_membership(queryable \\ User) do
    queryable
    # INNER JOIN memberships ON users.id = memberships.user_id.
    # NOTE(review): the positional `on` argument is the pre-Ecto-3 join/4
    # signature — newer Ecto requires the `on:` keyword. Confirm the Ecto
    # version before changing this call.
    |> join(:inner, [u], m in Membership, u.id == m.user_id)
    # Collapse duplicate rows for users holding several memberships.
    |> distinct(true)
    |> select([c], c) # Returns only the User struct, not the Memberships
  end
end
| 30.052632 | 75 | 0.688266 |
ffb459caae6d26e7e9fc415577b341ce25478cd0 | 1,595 | ex | Elixir | lib/phoenix_elixir_hello_world_web/views/error_helpers.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | lib/phoenix_elixir_hello_world_web/views/error_helpers.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | lib/phoenix_elixir_hello_world_web/views/error_helpers.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | defmodule PhoenixElixirHelloWorldWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_id(form, field)
)
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# When using gettext, we typically pass the strings we want
# to translate as a static argument:
#
# # Translate "is invalid" in the "errors" domain
# dgettext("errors", "is invalid")
#
# # Translate the number of files with plural rules
# dngettext("errors", "1 file", "%{count} files", count)
#
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
# This requires us to call the Gettext module passing our gettext
# backend as first argument.
#
# Note we use the "errors" domain, which means translations
# should be written to the errors.po file. The :count option is
# set by Ecto and indicates we should also apply plural rules.
if count = opts[:count] do
Gettext.dngettext(PhoenixElixirHelloWorldWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(PhoenixElixirHelloWorldWeb.Gettext, "errors", msg, opts)
end
end
end
| 33.229167 | 92 | 0.675235 |
ffb4809466eded4adf852ae4f3a8316f0a946859 | 4,528 | ex | Elixir | lib/components/menu_bar/menu_float_button.ex | axelson/scenic-widget-contrib | 951dc6f677d57f297739c03ce5aeab6f506d48d9 | [
"Apache-2.0"
] | null | null | null | lib/components/menu_bar/menu_float_button.ex | axelson/scenic-widget-contrib | 951dc6f677d57f297739c03ce5aeab6f506d48d9 | [
"Apache-2.0"
] | null | null | null | lib/components/menu_bar/menu_float_button.ex | axelson/scenic-widget-contrib | 951dc6f677d57f297739c03ce5aeab6f506d48d9 | [
"Apache-2.0"
] | null | null | null | defmodule ScenicWidgets.MenuBar.FloatButton do
# NOTE(review): the entire implementation of this module is commented out,
# so it compiles to an empty shell. Either restore the code below or remove
# the module from the build.
# use Scenic.Component
# require Logger
# @moduledoc """
# This module is really not that different from a normal Scenic Button,
# just customized a little bit.
# """
# def validate(%{label: _l, menu_index: _n, frame: _f, margin: _m, font: _fs} = data) do
# #Logger.debug "#{__MODULE__} accepted params: #{inspect data}"
# {:ok, data}
# end
# def init(scene, args, opts) do
# #Logger.debug "#{__MODULE__} initializing..."
# theme = QuillEx.Utils.Themes.theme(opts)
# init_graph = render(args, theme)
# init_scene = scene
# |> assign(graph: init_graph)
# |> assign(frame: args.frame)
# |> assign(theme: theme)
# |> assign(state: %{
# mode: :inactive,
# font: args.font,
# menu_index: args.menu_index})
# |> push_graph(init_graph)
# request_input(init_scene, [:cursor_pos, :cursor_button])
# {:ok, init_scene}
# end
# def render(args, theme) do
# {_width, height} = args.frame.size
# # https://github.com/boydm/scenic/blob/master/lib/scenic/component/button.ex#L200
# vpos = height/2 + (args.font.ascent/2) + (args.font.descent/3)
# Scenic.Graph.build()
# |> Scenic.Primitives.group(fn graph ->
# graph
# |> Scenic.Primitives.rect(args.frame.size,
# id: :background,
# fill: theme.active)
# |> Scenic.Primitives.text(args.label,
# id: :label,
# font: :ibm_plex_mono,
# font_size: args.font.size,
# translate: {args.margin, vpos},
# fill: theme.text)
# end, [
# id: {:float_button, args.menu_index},
# translate: args.frame.pin
# ])
# end
# def handle_input({:cursor_pos, {x, y} = coords}, _context, scene) do
# bounds = Scenic.Graph.bounds(scene.assigns.graph)
# theme = scene.assigns.theme
# new_graph =
# if coords |> QuillEx.Utils.HoverUtils.inside?(bounds) do
# GenServer.cast(QuillEx.GUI.Components.MenuBar, {:hover, scene.assigns.state.menu_index})
# scene.assigns.graph
# #TODO and change text to black
# |> Scenic.Graph.modify(:background, &Scenic.Primitives.update_opts(&1, fill: theme.highlight))
# else
# # GenServer.cast(QuillEx.GUI.Components.MenuBar, {:cancel, {:hover, scene.assigns.state.menu_index}})
# scene.assigns.graph
# |> Scenic.Graph.modify(:background, &Scenic.Primitives.update_opts(&1, fill: theme.active))
# end
# new_scene = scene
# |> assign(graph: new_graph)
# |> push_graph(new_graph)
# {:noreply, new_scene}
# end
# #TODO accept clicks, send msg bck up to menu bar??
# def handle_input({:cursor_pos, {x, y} = coords}, _context, scene) do
# bounds = Scenic.Graph.bounds(scene.assigns.graph)
# new_graph =
# if coords |> QuillEx.Utils.HoverUtils.inside?(bounds) do
# GenServer.cast(QuillEx.GUI.Components.MenuBar, {:hover, scene.assigns.state.menu_index})
# scene.assigns.graph
# |> Scenic.Graph.modify(:background, &Scenic.Primitives.update_opts(&1, fill: :green))
# else
# # GenServer.cast(QuillEx.GUI.Components.MenuBar, {:cancel, {:hover, scene.assigns.state.menu_index}})
# scene.assigns.graph
# |> Scenic.Graph.modify(:background, &Scenic.Primitives.update_opts(&1, fill: :blue))
# end
# new_scene = scene
# |> assign(graph: new_graph)
# |> push_graph(new_graph)
# {:noreply, new_scene}
# end
# def handle_input({:cursor_button, {:btn_left, 0, [], click_coords}}, _context, scene) do
# bounds = Scenic.Graph.bounds(scene.assigns.graph)
# if click_coords |> QuillEx.Utils.HoverUtils.inside?(bounds) do
# GenServer.cast(QuillEx.GUI.Components.MenuBar, {:click, scene.assigns.state.menu_index})
# end
# {:noreply, scene}
# end
# def handle_input(input, _context, scene) do
# # Logger.debug "#{__MODULE__} ignoring input: #{inspect input}..."
# {:noreply, scene}
# end
end | 37.733333 | 119 | 0.558746 |
ffb483e327ba5accc9aa48e456ee5b38f5c0933c | 1,051 | exs | Elixir | test/cog/chat/slack/templates/common/self_registration_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | test/cog/chat/slack/templates/common/self_registration_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | test/cog/chat/slack/templates/common/self_registration_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Chat.Slack.Templates.Common.SelfRegistrationTest do
use Cog.TemplateCase
test "self_registration_success renders properly" do
data = %{"mention_name" => "@cog",
"first_name" => "Cog",
"username" => "cog"}
expected = """
@cog: Hello Cog! It's great to meet you! You're the proud owner of a shiny new Cog account named 'cog'.
""" |> String.strip
assert_rendered_template(:slack, :common, "self-registration-success", data, expected)
end
test "self_registration_failed renders properly" do
data = %{"mention_name" => "@mystery_user",
"display_name" => "Slack"}
expected = """
@mystery_user: Unfortunately I was unable to automatically create a Cog account for your Slack chat handle. Only users with Cog accounts can interact with me.
You'll need to ask a Cog administrator to investigate the situation and set up your account.
""" |> String.strip
assert_rendered_template(:slack, :common, "self-registration-failed", data, expected)
end
end
| 38.925926 | 162 | 0.686013 |
ffb495f9d3e1d803cc53a8e3723d3bb3235755c6 | 1,610 | ex | Elixir | recallr/lib/recallr_web/endpoint.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | null | null | null | recallr/lib/recallr_web/endpoint.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | 1 | 2021-03-24T14:49:29.000Z | 2021-03-24T14:49:29.000Z | recallr/lib/recallr_web/endpoint.ex | myfoundea/liveview_march_2021 | 8aef6225271bbc8e9985746505d74dc850f295fb | [
"MIT"
] | 5 | 2021-03-21T19:45:38.000Z | 2021-03-24T14:53:07.000Z | defmodule RecallrWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :recallr
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
# Shared by the Plug.Session plug below and the LiveView socket so both
# read the same signed cookie.
@session_options [
  store: :cookie,
  key: "_recallr_key",
  signing_salt: "fU2KhT6T"
]

# User-defined channel socket: WebSocket transport only, long-polling off.
socket "/socket", RecallrWeb.UserSocket,
  websocket: true,
  longpoll: false

# LiveView socket; forwards the session options so LiveViews can access session data.
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :recallr,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :recallr
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug RecallrWeb.Router
end
| 29.272727 | 97 | 0.718634 |
ffb4ac988248496c0e01c5979cd716f2f0ce3aaf | 5,532 | ex | Elixir | lib/mono_repo/build.ex | alexander-uljev/mono_repo | 577d989a0ada0fa5262e736e3f455b9cc843f247 | [
"MIT"
] | 1 | 2020-05-05T22:28:46.000Z | 2020-05-05T22:28:46.000Z | lib/mono_repo/build.ex | alexander-uljev/mono_repo | 577d989a0ada0fa5262e736e3f455b9cc843f247 | [
"MIT"
] | null | null | null | lib/mono_repo/build.ex | alexander-uljev/mono_repo | 577d989a0ada0fa5262e736e3f455b9cc843f247 | [
"MIT"
] | null | null | null | defmodule MonoRepo.Build do
@moduledoc """
This module holds a set of search path functions for convinient building of
paths to parent applications.
These functions take a parent application's name as argument and return a
string of unexpanded path to parent application relative to caller's directory
. For example: if *root_app* includes *child_app* as tier 1 child then calling
any function will return *"../../specific_path"*.
"""
import File, only: [exists?: 1]
@doc """
Searches for the root or parent application and returns a path to it's build
directory.
Builds a path to parent application's build directory(_build) or raises a
RuntimeError. The first argument is an application's name as a string, the
second one is a path to be appended to parent's application path.
"""
@spec build_path(parent :: MonoRepo.parent() | :root, target :: String.t())
:: Path.t()
def build_path(parent \\ :root, target \\ "_build") do
get_path(parent, target)
end
@doc """
Searches for the root or parent application and returns a path to it's
configuration directory.
The first argument is an application's name as a string, the second one is a
path to be appended to parent's application path. Raises `RuntimeError` in
case the path can't be resolved.
"""
@spec build_config_path(parent :: MonoRepo.parent() | :root, target :: Path.t())
:: Path.t()
def build_config_path(parent \\ :root, target \\ Path.join("config", "config.exs"))
do
get_path(parent, target)
end
@doc """
Searches for the root or parent application and returns a path to it's
dependencies directory.
Builds a path to root/parent application's dependencies directory(deps) or
raises a `RuntimeError`. The first argument is an application's name as a
string, the second one is a path to be appended to parent's application path.
"""
@spec build_deps_path(parent :: MonoRepo.parent() | :root, target :: String.t())
:: Path.t()
def build_deps_path(parent \\ :root, target \\ "deps") do
get_path(parent, target)
end
@doc """
Searches for the root or parent application and returns a path to it's
lockfile path directory.
Builds a path to root/parent application's lockfile(mix.lock) or raises a
RuntimeError. The first argument is an application's name as a string, the
second one is a path to be appended to parent's application path.
"""
@spec build_lockfile_path(parent :: MonoRepo.parent() | :root,
target :: String.t())
:: Path.t()
def build_lockfile_path(parent \\ :root, target \\ "mix.lock") do
get_path(parent, target)
end
### PRIVATE ###
# Resolves the requested parent application's directory (the umbrella root
# for `:root`, or the named ancestor otherwise), appends `target`, and
# strips a leading "./" for a tidy relative path.
@spec get_path(MonoRepo.parent() | :root, String.t()) :: Path.t()
defp get_path(:root, target) do
  base = get_parent_path!(:root)
  trim_dot_dir(append_path(base, target))
end

defp get_path(parent, target) do
  base = match_parent_path!(parent)
  trim_dot_dir(append_path(base, target))
end
@spec get_parent_path!(:root) :: Path.t()
defp get_parent_path!(:root) do
if path = get_parent_path(:current) do
get_parent_path(path)
else
no_parent!()
end
end
@spec get_parent_path(:current) :: Path.t() | nil
defp get_parent_path(:current) do
parent = parent_path(".")
parent_apps = parent_apps_path(parent)
if exists?(parent_apps) do
parent
else
nil
end
end
@spec get_parent_path(Path.t()) :: Path.t()
defp get_parent_path(path) do
parent = parent_path(path)
parent_apps = parent_apps_path(parent)
if exists?(parent_apps) do
get_parent_path(parent)
else
path
end
end
@spec match_parent_path!(String.t()) :: Path.t()
defp match_parent_path!(target) do
if parent = match_parent_path(target) do
parent
else
no_parent!(target)
end
end
@spec match_parent_path(String.t()) :: Path.t()
defp match_parent_path(target) do
parent_path = parent_path(".")
match_parent_path(parent_path, target)
end
# Walks up the directory tree (two levels at a time via parent_path/1)
# looking for an umbrella ancestor whose directory name equals `target`.
#
# Returns nil as soon as the current candidate has no "apps" directory
# (no more umbrella parents); otherwise returns the matching path or
# recurses into the next enclosing umbrella.
@spec match_parent_path(Path.t(), String.t()) :: Path.t() | nil
defp match_parent_path(parent_path, target) do
  parent_apps = parent_apps_path(parent_path)
  if exists?(parent_apps) do
    parent_name = get_parent_name(parent_path)
    if parent_name != target do
      # Not the app we want — climb to the next enclosing umbrella (../..).
      parent_path = parent_path(parent_path)
      match_parent_path(parent_path, target)
    else
      parent_path
    end
  else
    nil
  end
end
# Returns the directory name of the given path: its last segment after
# expansion to an absolute path (expansion is relative to the cwd).
@spec get_parent_name(Path.t()) :: String.t()
defp get_parent_name(parent_path) do
  absolute = Path.expand(parent_path)
  List.last(Path.split(absolute))
end
# Two directory levels up from `path` ("<path>/../.."): umbrella children
# live under "<umbrella>/apps/<child>", so the umbrella root is always ../..
# No normalization is performed — the ".." segments stay in the result.
@spec parent_path(Path.t()) :: Path.t()
defp parent_path(path), do: Path.join(path, "../..")
@spec parent_apps_path(Path.t()) :: Path.t()
defp parent_apps_path(path), do: Path.join(path, "apps")
@spec append_path(Path.t(), Path.t()) :: Path.t()
defp append_path(path1, path2) do
Path.join(path1, path2)
end
@spec trim_dot_dir(Path.t()) :: Path.t()
defp trim_dot_dir(path), do: String.trim_leading(path, "./")
@spec no_parent!() :: no_return()
defp no_parent!() do
raise RuntimeError, """
Current working directory doesn't have a parent umbrella aplication.
"""
end
@spec no_parent!(String.t()) :: no_return()
defp no_parent!(target) do
raise RuntimeError, """
Could not find a target parent application for: #{target}
"""
end
end
| 28.081218 | 85 | 0.667932 |
ffb4fc5670607097b718abc54db9a0c6aaff23e1 | 1,752 | ex | Elixir | integration_test/phoenix_example_app/lib/phoenix_example_app_web.ex | elitau/tix | 2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8 | [
"MIT"
] | 1 | 2021-08-16T18:52:45.000Z | 2021-08-16T18:52:45.000Z | integration_test/phoenix_example_app/lib/phoenix_example_app_web.ex | elitau/tix | 2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8 | [
"MIT"
] | 16 | 2021-03-09T19:39:31.000Z | 2022-03-15T15:20:24.000Z | integration_test/phoenix_example_app/lib/phoenix_example_app_web.ex | elitau/tix | 2aa5fe4d91e7962ebcdc9b668aacf65e09ff9bb8 | [
"MIT"
] | null | null | null | defmodule PhoenixExampleAppWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use PhoenixExampleAppWeb, :controller
use PhoenixExampleAppWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: PhoenixExampleAppWeb
import Plug.Conn
import PhoenixExampleAppWeb.Gettext
alias PhoenixExampleAppWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/phoenix_example_app_web/templates",
namespace: PhoenixExampleAppWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import PhoenixExampleAppWeb.ErrorHelpers
import PhoenixExampleAppWeb.Gettext
alias PhoenixExampleAppWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import PhoenixExampleAppWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 25.028571 | 83 | 0.710616 |
ffb5157b87788f460b718564bf90e76467d9afd7 | 1,482 | ex | Elixir | lib/badges_web/live/test_live/form_component.ex | TomGrozev/SAD-Badges | a94331433ea21a0d719216ac8473e706166d6004 | [
"MIT"
] | null | null | null | lib/badges_web/live/test_live/form_component.ex | TomGrozev/SAD-Badges | a94331433ea21a0d719216ac8473e706166d6004 | [
"MIT"
] | null | null | null | lib/badges_web/live/test_live/form_component.ex | TomGrozev/SAD-Badges | a94331433ea21a0d719216ac8473e706166d6004 | [
"MIT"
] | null | null | null | defmodule BadgesWeb.TestLive.FormComponent do
use BadgesWeb, :live_component
alias Badges.Tests
@impl true
def update(%{test: test} = assigns, socket) do
changeset = Tests.change_test(test)
{:ok,
socket
|> assign(assigns)
|> assign(:changeset, changeset)}
end
@impl true
def handle_event("validate", %{"test" => test_params}, socket) do
  # Rebuild the changeset from the submitted params; setting :action to
  # :validate makes the form render validation errors.
  changeset = Tests.change_test(socket.assigns.test, test_params)
  {:noreply, assign(socket, :changeset, Map.put(changeset, :action, :validate))}
end
def handle_event("save", %{"test" => test_params}, socket) do
save_test(socket, socket.assigns.action, test_params)
end
defp save_test(socket, :edit, test_params) do
case Tests.update_test(socket.assigns.test, test_params) do
{:ok, _test} ->
{:noreply,
socket
|> put_flash(:info, "Test updated successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, :changeset, changeset)}
end
end
defp save_test(socket, :new, test_params) do
case Tests.create_test(test_params) do
{:ok, _test} ->
{:noreply,
socket
|> put_flash(:info, "Test created successfully")
|> push_redirect(to: socket.assigns.return_to)}
{:error, %Ecto.Changeset{} = changeset} ->
{:noreply, assign(socket, changeset: changeset)}
end
end
end
| 26.464286 | 67 | 0.634278 |
ffb51f779aeb3dfdd764deb3f9d7c41dfa0b5bab | 1,335 | exs | Elixir | test/redex/command/subscribe_test.exs | esmaeilpour/redex | c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7 | [
"Apache-2.0"
] | 173 | 2019-03-15T15:05:11.000Z | 2022-01-10T08:21:48.000Z | test/redex/command/subscribe_test.exs | esmaeilpour/redex | c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7 | [
"Apache-2.0"
] | null | null | null | test/redex/command/subscribe_test.exs | esmaeilpour/redex | c2c6e29e3dec0df265fdcd9f24cd2471c8615ee7 | [
"Apache-2.0"
] | 9 | 2019-07-28T01:20:43.000Z | 2021-08-18T03:41:44.000Z | defmodule Redex.Command.SubscribeTest do
use ExUnit.Case, async: true
use ExUnitProperties
import Mox
import Redex.DataGenerators
import Redex.Command.SUBSCRIBE
setup :verify_on_exit!
property "SUBSCRIBE to some channels" do
check all state <- state(channels: true),
channels <- list_of(binary(), min_length: 1) do
new_state =
for ch <- channels, reduce: state do
state = %{channels: channels} ->
channels =
if ch in channels do
channels
else
Pg2Mock
|> expect(:create, fn ^ch -> :ok end)
|> expect(:join, fn ^ch, pid when pid == self() -> :ok end)
[ch | channels]
end
state = %{state | channels: channels}
subscribed = length(channels)
ProtocolMock
|> expect(:reply, fn ["subscribe", ^ch, ^subscribed], ^state -> state end)
state
end
assert new_state == exec(channels, state)
end
end
test "SUBSCRIBE with wrong number of arguments" do
error = {:error, "ERR wrong number of arguments for 'SUBSCRIBE' command"}
state = state()
ProtocolMock
|> expect(:reply, fn ^error, ^state -> state end)
assert state == exec([], state)
end
end
| 25.673077 | 86 | 0.561049 |
ffb52dedf7440e43199f08d43a1d02cefa1522a9 | 6,655 | ex | Elixir | lib/mix/lib/mix/tasks/compile.protocols.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 243 | 2020-02-03T03:48:51.000Z | 2021-11-08T12:56:25.000Z | lib/mix/lib/mix/tasks/compile.protocols.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/mix/lib/mix/tasks/compile.protocols.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compile.Protocols do
use Mix.Task.Compiler
@manifest "compile.protocols"
@manifest_vsn 1
@moduledoc ~S"""
Consolidates all protocols in all paths.
This task is automatically invoked unless the project
disables the `:consolidate_protocols` option in their
configuration.
## Consolidation
Protocol consolidation is useful in production when no
dynamic code loading will happen, effectively optimizing
protocol dispatches by not accounting for code loading.
This task consolidates all protocols in the code path
and outputs the new binary files to the given directory.
Defaults to "_build/MIX_ENV/lib/YOUR_APP/consolidated"
for regular apps and "_build/MIX_ENV/consolidated" in
umbrella projects.
In case you are manually compiling protocols or building
releases, you need to take the generated protocols into
account. This can be done with:
$ elixir -pa _build/MIX_ENV/lib/YOUR_APP/consolidated -S mix run
Or in umbrellas:
$ elixir -pa _build/MIX_ENV/consolidated -S mix run
You can verify a protocol is consolidated by checking
its attributes:
iex> Protocol.consolidated?(Enumerable)
true
"""
@impl true
def run(args) do
config = Mix.Project.config()
Mix.Task.run("compile")
{opts, _, _} = OptionParser.parse(args, switches: [force: :boolean, verbose: :boolean])
manifest = manifest()
output = Mix.Project.consolidation_path(config)
protocols_and_impls = protocols_and_impls(config)
cond do
opts[:force] || Mix.Utils.stale?([Mix.Project.config_mtime()], [manifest]) ->
clean()
paths = consolidation_paths()
paths
|> Protocol.extract_protocols()
|> consolidate(paths, output, manifest, protocols_and_impls, opts)
protocols_and_impls ->
manifest
|> diff_manifest(protocols_and_impls, output)
|> consolidate(consolidation_paths(), output, manifest, protocols_and_impls, opts)
true ->
:noop
end
end
@impl true
def clean do
File.rm(manifest())
File.rm_rf(Mix.Project.consolidation_path())
end
@impl true
def manifests, do: [manifest()]
defp manifest, do: Path.join(Mix.Project.manifest_path(), @manifest)
@doc """
Returns if protocols have been consolidated at least once.
"""
def consolidated? do
File.regular?(manifest())
end
defp protocols_and_impls(config) do
deps = for %{scm: scm, opts: opts} <- Mix.Dep.cached(), not scm.fetchable?, do: opts[:build]
app =
if Mix.Project.umbrella?(config) do
[]
else
[Mix.Project.app_path(config)]
end
protocols_and_impls =
for path <- app ++ deps do
manifest_path = Path.join(path, ".mix/compile.elixir")
compile_path = Path.join(path, "ebin")
Mix.Compilers.Elixir.protocols_and_impls(manifest_path, compile_path)
end
Enum.concat(protocols_and_impls)
end
defp consolidation_paths do
filter_otp(:code.get_path(), :code.lib_dir())
end
# Drops code paths that live inside the OTP installation (charlist prefix
# match against the path returned by :code.lib_dir/0).
defp filter_otp(paths, otp) do
  Enum.reject(paths, fn path -> :lists.prefix(otp, path) end)
end
defp consolidate([], _paths, output, manifest, metadata, _opts) do
File.mkdir_p!(output)
write_manifest(manifest, metadata)
:noop
end
defp consolidate(protocols, paths, output, manifest, metadata, opts) do
File.mkdir_p!(output)
protocols
|> Enum.uniq()
|> Enum.map(&Task.async(fn -> consolidate(&1, paths, output, opts) end))
|> Enum.map(&Task.await(&1, :infinity))
write_manifest(manifest, metadata)
:ok
end
defp consolidate(protocol, paths, output, opts) do
impls = Protocol.extract_impls(protocol, paths)
reload(protocol)
case Protocol.consolidate(protocol, impls) do
{:ok, binary} ->
File.write!(Path.join(output, "#{Atom.to_string(protocol)}.beam"), binary)
if opts[:verbose] do
Mix.shell().info("Consolidated #{inspect_protocol(protocol)}")
end
# If we remove a dependency and we have implemented one of its
# protocols locally, we will mark the protocol as needing to be
# reconsolidated when the implementation is removed even though
# the protocol no longer exists. Although most times removing a
# dependency will trigger a full recompilation, such won't happen
# in umbrella apps with shared build.
{:error, :no_beam_info} ->
remove_consolidated(protocol, output)
if opts[:verbose] do
Mix.shell().info("Unavailable #{inspect_protocol(protocol)}")
end
end
end
# We cannot use the inspect protocol while consolidating
# since inspect may not be available.
defp inspect_protocol(protocol) do
Code.Identifier.inspect_as_atom(protocol)
end
defp reload(module) do
:code.purge(module)
:code.delete(module)
end
# Reads the consolidation manifest and returns its metadata list.
#
# The broad rescue is deliberate: a missing file, a failed deserialization,
# or a version-tag mismatch (the match on @manifest_vsn raises MatchError)
# all mean the manifest is unusable, so the consolidated output directory
# is wiped and an empty metadata list is returned.
defp read_manifest(manifest, output) do
  try do
    [@manifest_vsn | metadata] = manifest |> File.read!() |> :erlang.binary_to_term()
    metadata
  rescue
    _ ->
      # If there is no manifest or it is out of date, remove old files
      File.rm_rf(output)
      []
  end
end
defp write_manifest(manifest, metadata) do
File.mkdir_p!(Path.dirname(manifest))
manifest_data = :erlang.term_to_binary([@manifest_vsn | metadata], [:compressed])
File.write!(manifest, manifest_data)
end
defp diff_manifest(manifest, new_metadata, output) do
modified = Mix.Utils.last_modified(manifest)
old_metadata = read_manifest(manifest, output)
protocols =
for {protocol, :protocol, beam} <- new_metadata,
Mix.Utils.last_modified(beam) > modified,
remove_consolidated(protocol, output),
do: {protocol, true},
into: %{}
protocols =
Enum.reduce(new_metadata -- old_metadata, protocols, fn
{_, {:impl, protocol}, _beam}, protocols ->
Map.put(protocols, protocol, true)
{protocol, :protocol, _beam}, protocols ->
Map.put(protocols, protocol, true)
end)
removed_metadata = old_metadata -- new_metadata
removed_protocols =
for {protocol, :protocol, _beam} <- removed_metadata,
remove_consolidated(protocol, output),
do: {protocol, true},
into: %{}
protocols =
for {_, {:impl, protocol}, _beam} <- removed_metadata,
not Map.has_key?(removed_protocols, protocol),
do: {protocol, true},
into: protocols
Map.keys(protocols)
end
defp remove_consolidated(protocol, output) do
File.rm(Path.join(output, "#{Atom.to_string(protocol)}.beam"))
end
end
| 28.440171 | 96 | 0.669572 |
ffb53e8d0be68fe0f92783115e3c07b756d44247 | 961 | exs | Elixir | auditor/test/poll_test.exs | zeroasterisk/faas-cold-start-auditor | eef6c50d4f1d33dfad81bc9c8e571f7d827a4223 | [
"MIT"
] | 1 | 2018-07-31T05:43:23.000Z | 2018-07-31T05:43:23.000Z | auditor/test/poll_test.exs | zeroasterisk/faas-cold-start-auditor | eef6c50d4f1d33dfad81bc9c8e571f7d827a4223 | [
"MIT"
] | null | null | null | auditor/test/poll_test.exs | zeroasterisk/faas-cold-start-auditor | eef6c50d4f1d33dfad81bc9c8e571f7d827a4223 | [
"MIT"
] | null | null | null | defmodule PollTest do
use ExUnit.Case
doctest Poll
test "requests 10 total, 1 concurrant @ google" do
res = Poll.requests(10, 1, "https://www.google.com/")
# [262.364, 259.223, 262.516, 255.973, 256.028, 262.277, 259.421, 251.385, 259.483, 251.451]
assert res |> Enum.count() == 10
assert is_float(res |> List.first()) == true
end
test "requests 10 total, 3 concurrant @ google" do
res = Poll.requests(10, 3, "https://www.google.com/")
# [262.364, 259.223, 262.516, 255.973, 256.028, 262.277, 259.421, 251.385, 259.483, 251.451]
assert res |> Enum.count() == 10
assert is_float(res |> List.first()) == true
end
test "concurrent_requests 10 @ google" do
res = Poll.concurrent_requests(10, "https://www.google.com/")
# [262.364, 259.223, 262.516, 255.973, 256.028, 262.277, 259.421, 251.385, 259.483, 251.451]
assert res |> Enum.count() == 10
assert is_float(res |> List.first()) == true
end
end
| 36.961538 | 96 | 0.633715 |
ffb55df925af64af97b923b9397b7fbe7a22ccc1 | 991 | exs | Elixir | test/gen_magic/pool/poolboy_test.exs | kianmeng/gen_magic | db6888a90f5b1fa065abe8b45d8fbac0bde8e3c0 | [
"Apache-2.0"
] | 7 | 2020-05-13T22:47:32.000Z | 2022-01-09T00:44:08.000Z | test/gen_magic/pool/poolboy_test.exs | kianmeng/gen_magic | db6888a90f5b1fa065abe8b45d8fbac0bde8e3c0 | [
"Apache-2.0"
] | 13 | 2020-05-04T14:06:48.000Z | 2022-01-05T12:41:04.000Z | test/gen_magic/pool/poolboy_test.exs | kianmeng/gen_magic | db6888a90f5b1fa065abe8b45d8fbac0bde8e3c0 | [
"Apache-2.0"
] | 4 | 2020-04-28T16:58:50.000Z | 2021-07-24T21:36:32.000Z | defmodule GenMagic.Pool.PoolboyTest do
use GenMagic.MagicCase
alias GenMagic.Pool.Poolboy, as: Pool
describe "Poolboy" do
test "can be addressed by name if started by name" do
{:ok, _} = Pool.start_link(pool_name: TestPool)
assert_file(TestPool)
end
test "can not be started without pool_name" do
assert_raise ArgumentError, "pool_name must be set", fn ->
Pool.start_link([])
end
end
test "works concurrently" do
{:ok, _} = Pool.start_link(pool_name: TestPool, pool_size: 2)
parent_pid = self()
for _ <- 1..10 do
spawn(fn ->
for _ <- 1..10 do
assert_file(TestPool)
end
send(parent_pid, :ok)
end)
end
for _ <- 1..10 do
assert_receive :ok, 5000
end
end
end
defp assert_file(pool), do: assert_file(pool, absolute_path("Makefile"))
defp assert_file(pool, path), do: assert({:ok, _} = Pool.perform(pool, path, []))
end
| 24.775 | 83 | 0.606458 |
ffb59b330b756918ad886d982c070f5e32c3ce2a | 937 | ex | Elixir | test/support/channel_case.ex | orneryhippo/phlearn | 7ef72120c8d4719ef90809f16907b5e98d6c54b1 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | orneryhippo/phlearn | 7ef72120c8d4719ef90809f16907b5e98d6c54b1 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | orneryhippo/phlearn | 7ef72120c8d4719ef90809f16907b5e98d6c54b1 | [
"MIT"
] | null | null | null | defmodule PhlearnWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.
  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common datastructures and query the data layer.
  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """
  use ExUnit.CaseTemplate
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest
      # The default endpoint for testing
      @endpoint PhlearnWeb.Endpoint
    end
  end
  setup tags do
    # Check out a dedicated DB connection from the SQL sandbox per test.
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Phlearn.Repo)
    unless tags[:async] do
      # Non-async tests share the connection with spawned processes.
      Ecto.Adapters.SQL.Sandbox.mode(Phlearn.Repo, {:shared, self()})
    end
    :ok
  end
end
| 24.657895 | 69 | 0.716115 |
ffb5b732eed3a825a991362baab99a0d937871d1 | 1,093 | ex | Elixir | lib/cessao_recebiveis.ex | rhnonose/receptible_calculator | 33667106384a739b822f5b95c093ba08c182fe1f | [
"MIT"
] | null | null | null | lib/cessao_recebiveis.ex | rhnonose/receptible_calculator | 33667106384a739b822f5b95c093ba08c182fe1f | [
"MIT"
] | null | null | null | lib/cessao_recebiveis.ex | rhnonose/receptible_calculator | 33667106384a739b822f5b95c093ba08c182fe1f | [
"MIT"
] | null | null | null | defmodule CessaoRecebiveis do
  @moduledoc """
  Selects a set of receivables whose discounted values add up to a target
  amount while minimising the loss rate applied to each one.

  Each receivable is a `{value, installments}` tuple; its discounted value
  is `value / installments` and its loss rate depends on the number of
  installments (see `@loss_table`).
  """

  # Total discounted value to accumulate before stopping collection.
  @default_limit 700_000

  @doc """
  Generates 10_000 random `{value, installments}` receivables across three
  value bands (50..700, 701..2000 and 2001..10_000), each with 1..12
  installments.
  """
  def generate_random do
    Enum.map(1..1000, fn _ -> {Enum.random(50..700), Enum.random(1..12)} end)
    ++ Enum.map(1..8000, fn _ -> {Enum.random(701..2000), Enum.random(1..12)} end)
    ++ Enum.map(1..1000, fn _ -> {Enum.random(2001..10_000), Enum.random(1..12)} end)
  end

  @doc """
  Picks receivables in ascending order of loss rate until their discounted
  values reach `limit` (defaults to #{@default_limit}).

  Returns `%{values: selected, sum: total}`.
  """
  def calculate_minimum_loss(numbers, limit \\ @default_limit) do
    numbers
    |> Enum.map(fn {value, quoc} -> %{value: value / quoc, loss: get_loss(quoc)} end)
    |> Enum.sort(fn %{loss: loss1}, %{loss: loss2} -> loss1 <= loss2 end)
    |> Enum.reduce_while(%{values: [], sum: 0.0}, &collect(&1, &2, limit))
  end

  @doc false
  # Kept for backward compatibility with callers of the 2-arity version.
  def collect(current, acc), do: collect(current, acc, @default_limit)

  @doc false
  # Accumulates `current` while the running sum is still below `limit`;
  # halts the reduction once the limit has been reached.
  def collect(%{value: value} = current, %{values: values, sum: sum} = acc, limit) do
    if sum < limit do
      {:cont, %{values: values ++ [current], sum: sum + value}}
    else
      {:halt, acc}
    end
  end

  # Loss rate by number of installments: fewer installments carry a
  # higher loss rate.
  @loss_table %{
    1 => 0.1,
    2 => 0.08,
    3 => 0.06,
    4 => 0.04,
    5 => 0.03,
    6 => 0.02,
    7 => 0.01,
    8 => 0.01,
    9 => 0.01,
    10 => 0.01,
    11 => 0.01,
    12 => 0.01,
  }
  defp get_loss(quoc), do: @loss_table[quoc]
end
| 24.840909 | 85 | 0.548033 |
ffb5e942fd0ef26b308d5a1fb7364ee985682476 | 1,191 | ex | Elixir | lib/bookish/endpoint.ex | beccanelson/8th-light-library | d0aa70371ca7645abf8609b53202ebb1808ca633 | [
"MIT"
] | null | null | null | lib/bookish/endpoint.ex | beccanelson/8th-light-library | d0aa70371ca7645abf8609b53202ebb1808ca633 | [
"MIT"
] | 9 | 2016-09-22T20:30:22.000Z | 2016-09-22T21:00:53.000Z | lib/bookish/endpoint.ex | beccanelson/library | d0aa70371ca7645abf8609b53202ebb1808ca633 | [
"MIT"
] | null | null | null | defmodule Bookish.Endpoint do
  # HTTP entry point for the Bookish application; plugs run in the order
  # they are declared below, ending at the router.
  use Phoenix.Endpoint, otp_app: :bookish
  socket "/socket", Bookish.UserSocket
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/", from: :bookish, gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  # Request-id header and request logging for every request.
  plug Plug.RequestId
  plug Plug.Logger
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  plug Plug.MethodOverride
  plug Plug.Head
  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  plug Plug.Session,
    store: :cookie,
    key: "_bookish_key",
    signing_salt: "ojZozggQ"
  plug Bookish.Router
end
| 27.697674 | 69 | 0.714526 |
ffb5ea155b788e298ade1902da5a4cb48f6a7683 | 3,199 | ex | Elixir | clients/domains/lib/google_api/domains/v1beta1/model/transfer_domain_request.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/domains/lib/google_api/domains/v1beta1/model/transfer_domain_request.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/domains/lib/google_api/domains/v1beta1/model/transfer_domain_request.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Domains.V1beta1.Model.TransferDomainRequest do
  @moduledoc """
  Request for the `TransferDomain` method.

  ## Attributes

  *   `authorizationCode` (*type:* `GoogleApi.Domains.V1beta1.Model.AuthorizationCode.t`, *default:* `nil`) - The domain's transfer authorization code. You can obtain this from the domain's current registrar.
  *   `contactNotices` (*type:* `list(String.t)`, *default:* `nil`) - The list of contact notices that you acknowledge. The notices needed here depend on the values specified in `registration.contact_settings`.
  *   `registration` (*type:* `GoogleApi.Domains.V1beta1.Model.Registration.t`, *default:* `nil`) - Required. The complete `Registration` resource to be created. You can leave `registration.dns_settings` unset to import the domain's current DNS configuration from its current registrar. Use this option only if you are sure that the domain's current DNS service will not cease upon transfer, as is often the case for DNS services provided for free by the registrar.
  *   `validateOnly` (*type:* `boolean()`, *default:* `nil`) - Validate the request without actually transferring the domain.
  *   `yearlyPrice` (*type:* `GoogleApi.Domains.V1beta1.Model.Money.t`, *default:* `nil`) - Required. Acknowledgement of the price to transfer or renew the domain for one year. Call `RetrieveTransferParameters` to obtain the price, which you must acknowledge.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :authorizationCode => GoogleApi.Domains.V1beta1.Model.AuthorizationCode.t() | nil,
          :contactNotices => list(String.t()) | nil,
          :registration => GoogleApi.Domains.V1beta1.Model.Registration.t() | nil,
          :validateOnly => boolean() | nil,
          :yearlyPrice => GoogleApi.Domains.V1beta1.Model.Money.t() | nil
        }

  # Field declarations - presumably consumed by GoogleApi.Gax.ModelBase
  # (used above) to map JSON keys onto this struct; nested messages
  # decode through the module given in `as:`.
  field(:authorizationCode, as: GoogleApi.Domains.V1beta1.Model.AuthorizationCode)
  field(:contactNotices, type: :list)
  field(:registration, as: GoogleApi.Domains.V1beta1.Model.Registration)
  field(:validateOnly)
  field(:yearlyPrice, as: GoogleApi.Domains.V1beta1.Model.Money)
end
defimpl Poison.Decoder, for: GoogleApi.Domains.V1beta1.Model.TransferDomainRequest do
  # Delegate decoding to the model's generated decode/2.
  def decode(struct, opts),
    do: GoogleApi.Domains.V1beta1.Model.TransferDomainRequest.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Domains.V1beta1.Model.TransferDomainRequest do
  # Encode via the shared Gax model encoder.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 54.220339 | 465 | 0.747734 |
ffb5ed7b7cbf406cfb9d65b6c903d023667a96d1 | 4,319 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/binding.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/binding.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/binding.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.Binding do
  @moduledoc """
  Associates `members`, or principals, with a `role`.

  ## Attributes

  *   `condition` (*type:* `GoogleApi.CloudResourceManager.V1.Model.Expr.t`, *default:* `nil`) - The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
  *   `members` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `alice@example.com` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `my-other-app@appspot.gserviceaccount.com`. * `group:{emailid}`: An email address that represents a Google group. For example, `admins@example.com`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `alice@example.com?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `my-other-app@appspot.gserviceaccount.com?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `admins@example.com?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
  *   `role` (*type:* `String.t`, *default:* `nil`) - Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :condition => GoogleApi.CloudResourceManager.V1.Model.Expr.t() | nil,
          :members => list(String.t()) | nil,
          :role => String.t() | nil
        }

  # Field declarations - presumably consumed by GoogleApi.Gax.ModelBase
  # (used above) for JSON (de)serialization of this struct.
  field(:condition, as: GoogleApi.CloudResourceManager.V1.Model.Expr)
  field(:members, type: :list)
  field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.Binding do
  # Delegate decoding to the model's generated decode/2.
  def decode(struct, opts),
    do: GoogleApi.CloudResourceManager.V1.Model.Binding.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.Binding do
  # Encode via the shared Gax model encoder.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 81.490566 | 1,972 | 0.755499 |
ffb635c61cab6e7bcebbb76693f443fffc95345c | 2,401 | exs | Elixir | test/exdns/storage/ets_storage_test.exs | jeanparpaillon/exdns | 53b7fc780399eda96d42052e11e03d5eb0dcd789 | [
"MIT"
] | 16 | 2016-05-26T10:11:57.000Z | 2021-01-08T15:09:19.000Z | test/exdns/storage/ets_storage_test.exs | jeanparpaillon/exdns | 53b7fc780399eda96d42052e11e03d5eb0dcd789 | [
"MIT"
] | 9 | 2016-08-11T00:48:27.000Z | 2020-09-16T22:10:07.000Z | test/exdns/storage/ets_storage_test.exs | jeanparpaillon/exdns | 53b7fc780399eda96d42052e11e03d5eb0dcd789 | [
"MIT"
] | 11 | 2016-08-10T08:13:36.000Z | 2021-04-03T10:20:11.000Z | defmodule Exdns.Storage.EtsStorageTest do
use ExUnit.Case, async: false
test "create" do
assert Exdns.Storage.EtsStorage.create(:test) == :ok
end
test "create lookup table" do
assert Exdns.Storage.EtsStorage.create(:lookup_table) == :ok
end
test "create schema is not implemented" do
assert Exdns.Storage.EtsStorage.create(:schema) == {:error, :not_implemented}
end
test "insert" do
Exdns.Storage.EtsStorage.create(:test)
assert Exdns.Storage.EtsStorage.insert(:test, {:key, :value})
assert :ets.lookup(:test, :key) == [key: :value]
end
test "delete table" do
Exdns.Storage.EtsStorage.create(:test)
assert Exdns.Storage.EtsStorage.delete_table(:test) == :ok
end
test "backup table not implemented" do
assert Exdns.Storage.EtsStorage.backup_table(:test) == {:error, :not_implemented}
end
test "backup tables not implemented" do
assert Exdns.Storage.EtsStorage.backup_tables() == {:error, :not_implemented}
end
test "select key from table" do
Exdns.Storage.EtsStorage.create(:test)
assert Exdns.Storage.EtsStorage.select(:test, :key) == []
Exdns.Storage.EtsStorage.insert(:test, {:key, :value})
assert Exdns.Storage.EtsStorage.select(:test, :key) == [key: :value]
end
test "select match spec from table" do
Exdns.Storage.EtsStorage.create(:test)
assert Exdns.Storage.EtsStorage.select(:test, [{{'$1', '$2'}, [], [{{'$1', '$2'}}]}], :infinite) == []
Exdns.Storage.EtsStorage.insert(:test, {:key, :value})
assert Exdns.Storage.EtsStorage.select(:test, :key) == [key: :value]
end
test "foldl on table" do
Exdns.Storage.EtsStorage.create(:test)
Exdns.Storage.EtsStorage.insert(:test, {:key1, 1})
Exdns.Storage.EtsStorage.insert(:test, {:key2, 1})
assert Exdns.Storage.EtsStorage.foldl(fn({_key, val}, acc) -> val + acc end, 0, :test) == 2
end
test "empty table" do
Exdns.Storage.EtsStorage.create(:test)
Exdns.Storage.EtsStorage.insert(:test, {:key, :value})
assert Exdns.Storage.EtsStorage.select(:test, :key) == [key: :value]
Exdns.Storage.EtsStorage.empty_table(:test)
assert Exdns.Storage.EtsStorage.select(:test, :key) == []
end
test "list table" do
Exdns.Storage.EtsStorage.create(:test)
Exdns.Storage.EtsStorage.insert(:test, {:key, :value})
assert Exdns.Storage.EtsStorage.list_table(:test) == [key: :value]
end
end
| 33.816901 | 106 | 0.683882 |
ffb65fbd57c72a1a269271e8324454ca51456a56 | 2,992 | ex | Elixir | lib/day7.ex | erljef/adventofcode-2017 | ce601ee8b812a0255178a4074f05a577c9288a8f | [
"WTFPL"
] | null | null | null | lib/day7.ex | erljef/adventofcode-2017 | ce601ee8b812a0255178a4074f05a577c9288a8f | [
"WTFPL"
] | null | null | null | lib/day7.ex | erljef/adventofcode-2017 | ce601ee8b812a0255178a4074f05a577c9288a8f | [
"WTFPL"
] | null | null | null | defmodule Day7 do
  @moduledoc """
  Advent of Code 2017, day 7: builds a tree of "discs" from the puzzle
  input, finds the root disc, and computes the corrected weight for the
  single disc that unbalances the tower.

  Discs are stored in a map of `name => %{name: ..., weight: ..., children: [...]}`
  (leaf discs have no `:children` key).
  """

  @doc "Reads and parses the puzzle input from `path`."
  def read_file(path) do
    File.stream!(path)
    |> parse_input
  end

  @doc """
  Parses lines such as `"fwft (72) -> ktlj, cntj, xhth"` into a map
  keyed by disc name.
  """
  def parse_input(input) do
    input
    |> Stream.map(&String.split/1)
    |> Stream.reject(&Enum.empty?/1)
    |> Enum.to_list
    |> Enum.map(&row/1)
    |> Map.new(fn item -> {item[:name], item} end)
  end

  # Turns one tokenised line into a disc map, dropping the "->"
  # separator and trailing commas on child names.
  def row(list) do
    list
    |> Enum.filter(fn item -> item != "->" end)
    |> Enum.map(fn item -> String.trim(item, ",") end)
    |> Enum.reduce(%{}, &populate_map/2)
  end

  # First token becomes the name, second the "(weight)", the rest the
  # children list.
  def populate_map(item, map) do
    cond do
      !Map.has_key?(map, :name) -> Map.put(map, :name, item)
      !Map.has_key?(map, :weight) -> Map.put(map, :weight, item |> String.trim("(") |> String.trim(")") |> String.to_integer)
      true -> Map.update(map, :children, [item], fn children -> children ++ [item] end)
    end
  end

  @doc "Finds the disc with no parent, i.e. the root of the tower."
  def find_root(discs) do
    find_root(discs, discs |> Map.values |> hd)
  end

  # Walks upwards from `disc` until no parent exists.
  def find_root(discs, disc) do
    case parent(discs, disc) do
      nil -> disc
      parent -> find_root(discs, parent)
    end
  end

  # Linear scan for the disc that lists `disc` among its children.
  def parent(discs, disc) do
    discs |> Map.values |> Enum.find(fn d -> Enum.find(Map.get(d, :children, []), fn child -> child == disc[:name] end) end)
  end

  # Resolves a disc's child names into their disc maps.
  def children(discs, disc) do
    Map.get(disc, :children, []) |> Enum.map(fn name -> discs[name] end)
  end

  @doc "Weight of `disc` plus the total weight of everything above it."
  def tree_weight(discs, disc) do
    disc[:weight] + (subtree_weights(discs, disc) |> Enum.sum)
  end

  # Total weight carried above `disc` (all descendants, excluding `disc`).
  def subtree_weight(discs, disc) do
    children(discs, disc) |> Enum.reduce(0, fn child, acc -> acc + child[:weight] + subtree_weight(discs, child) end)
  end

  # Total weight of each immediate child's tower (child + its subtree).
  def subtree_weights(discs, disc) do
    children(discs, disc) |> Enum.map(fn child -> child[:weight] + subtree_weight(discs, child) end)
  end

  @doc """
  Returns the corrected weight for the single wrongly-weighted disc in
  the tower, or 0 if the tower is balanced.
  """
  def find_unbalanced(discs) do
    find_unbalanced(discs, find_root(discs))
  end

  def find_unbalanced(discs, disc) when is_map(disc) do
    children = children(discs, disc)
    if is_balanced?(discs, disc) do
      0
    else
      if Enum.all?(children, fn child -> is_balanced?(discs, child) end) do
        # All children are internally balanced, so the imbalance is at
        # this level: exactly one child's tower weight differs from its
        # siblings'. Correct that child's own weight by the difference.
        groups = children |> Enum.group_by(fn child -> tree_weight(discs, child) end)
        {_, [wrong]} = groups |> Enum.find(fn {_, list} -> length(list) == 1 end)
        {_, [correct | _]} = groups |> Enum.find(fn {_, list} -> length(list) > 1 end)
        wrong[:weight] - (tree_weight(discs, wrong) - tree_weight(discs, correct))
      else
        # Recurse until the deepest unbalanced level is found.
        Map.get(disc, :children, []) |> Enum.map(fn child -> find_unbalanced(discs, discs[child]) end) |> Enum.max
      end
    end
  end

  def is_balanced?(discs, disc) do
    is_balanced?(subtree_weights(discs, disc))
  end

  # A disc is balanced when all child tower weights are equal; a leaf
  # (empty list) is balanced by definition. The previous `size == 1`
  # check treated leaves as unbalanced, which crashed the recursion in
  # find_unbalanced/2 with a MatchError when it reached a leaf.
  def is_balanced?(weights) do
    MapSet.size(MapSet.new(weights)) <= 1
  end
end | 32.879121 | 125 | 0.611965 |
ffb674802029c171f869772edef48a69f3d4ddfb | 1,157 | exs | Elixir | mix.exs | adigitalmonk/rinku | 57475c99cb66b527a2f52c73ee8d5af7e136a682 | [
"MIT"
] | null | null | null | mix.exs | adigitalmonk/rinku | 57475c99cb66b527a2f52c73ee8d5af7e136a682 | [
"MIT"
] | null | null | null | mix.exs | adigitalmonk/rinku | 57475c99cb66b527a2f52c73ee8d5af7e136a682 | [
"MIT"
] | null | null | null | defmodule Rinku.MixProject do
  @moduledoc false
  use Mix.Project
  # Single source of truth for the package version.
  @version "0.0.1"
  def project do
    [
      app: :rinku,
      version: @version,
      elixir: "~> 1.10",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      source_url: "https://github.com/adigitalmonk/rinku",
      homepage_url: "https://adigitalmonk.github.io/rinku",
      docs: [
        main: "readme",
        extras: ["README.md"]
      ],
      # Dialyzer settings: cached PLT under _build (see plt_file_path/0).
      dialyzer: [
        ignore_warnings: "dialyzer.ignore.exs",
        list_unused_filters: true,
        plt_add_apps: [:mix],
        plt_file: {:no_warn, plt_file_path()}
      ]
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Dev-only tooling; no runtime dependencies.
  defp deps do
    [
      {:benchee, "~> 1.0", only: :dev, runtime: false},
      {:credo, "~> 1.5", only: :dev, runtime: false},
      {:ex_doc, "~> 0.24", only: :dev, runtime: false},
      {:dialyxir, "~> 1.0", only: :dev, runtime: false}
    ]
  end
  # Absolute path for the dialyzer PLT inside the build directory.
  defp plt_file_path do
    [Mix.Project.build_path(), "plt", "dialyxir.plt"]
    |> Path.join()
    |> Path.expand()
  end
end
| 22.686275 | 59 | 0.559205 |
ffb67a919cb04af07992231cee02216c9bc182fd | 3,643 | exs | Elixir | test/kelvin/in_order_subscription_test.exs | NFIBrokerage/kelvin | 2c30111d2b2f1f8b8082d99b78ff2c465acb2ea8 | [
"Apache-2.0"
] | null | null | null | test/kelvin/in_order_subscription_test.exs | NFIBrokerage/kelvin | 2c30111d2b2f1f8b8082d99b78ff2c465acb2ea8 | [
"Apache-2.0"
] | 2 | 2021-05-27T19:49:14.000Z | 2021-08-12T14:31:09.000Z | test/kelvin/in_order_subscription_test.exs | NFIBrokerage/kelvin | 2c30111d2b2f1f8b8082d99b78ff2c465acb2ea8 | [
"Apache-2.0"
] | null | null | null | defmodule Kelvin.InOrderSubscriptionTest do
  # Integration tests for the in-order EventStore subscription: events
  # must arrive one-by-one, in write order, including after reconnects
  # and against slow consumers.
  use ExUnit.Case, async: true
  @moduletag :capture_log
  alias Extreme.Messages
  setup do
    # Unique stream/producer names per test so async runs don't collide.
    stream_name = "Kelvin.Test-#{UUID.uuid4()}"
    producer_name = String.to_atom("Kelvin.InOrderProducer-#{UUID.uuid4()}")
    [stream_name: stream_name, producer_name: producer_name]
  end
  describe "given events have been written to a stream" do
    setup c do
      write_events(0..100, c.stream_name)
      :ok
    end
    test "a subscription reads all written events and new ones", c do
      opts = [
        producer_name: c.producer_name,
        stream_name: c.stream_name,
        restore_stream_position!: &restore_stream_position!/0,
        test_proc: self()
      ]
      start_supervised!({MyInOrderSupervisor, opts})
      # Catch-up phase: events 0..100 arrive singly and in order.
      for n <- 0..100 do
        assert_receive {:events, [event]}, 6_000
        assert event.event.data == to_string(n)
      end
      # Live phase: newly written events keep flowing in order.
      write_events(101..200, c.stream_name)
      for n <- 101..200 do
        assert_receive {:events, [event]}, 1_000
        assert event.event.data == to_string(n)
      end
    end
    test "a subscription catches up even if a tcp_closed occurs", c do
      opts = [
        producer_name: c.producer_name,
        stream_name: c.stream_name,
        restore_stream_position!: &restore_stream_position!/0,
        test_proc: self()
      ]
      start_supervised!({MyInOrderSupervisor, opts})
      for n <- 0..100 do
        assert_receive {:events, [event]}, 6_000
        assert event.event.data == to_string(n)
      end
      # Simulate a dropped TCP connection and wait for the client to die.
      monitor_ref =
        ExtremeClient.Connection
        |> GenServer.whereis()
        |> Process.monitor()
      send(ExtremeClient.Connection, {:tcp_closed, ""})
      assert_receive {:DOWN, ^monitor_ref, _, _, _}
      # we're hardcoding the restore_stream_position! function so this will
      # restart from 0 instead of the current stream position as would be the
      # case in a real-life system
      for n <- 0..100 do
        assert_receive {:events, [event]}, 10_000
        assert event.event.data == to_string(n)
      end
      write_events(101..200, c.stream_name)
      for n <- 101..200 do
        assert_receive {:events, [event]}, 1_000
        assert event.event.data == to_string(n)
      end
    end
  end
  describe "given only a few events have been written to a stream" do
    setup c do
      write_events(0..10, c.stream_name)
      :ok
    end
    test "a slow subscription catches up", c do
      opts = [
        producer_name: c.producer_name,
        stream_name: c.stream_name,
        restore_stream_position!: &restore_stream_position!/0,
        test_proc: self(),
        # note how we add an artificial bottleneck to the consumer here
        sleep_time: 100,
        # and tune down the catch-up (and therefore max buffer queue size)
        catch_up_chunk_size: 1
        # in order to simulate a consumer which is slow and get coverage
        # on the supply-buffering we do with the queue
      ]
      start_supervised!({MyInOrderSupervisor, opts})
      for n <- 0..10 do
        assert_receive {:events, [event]}, 6_000
        assert event.event.data == to_string(n)
      end
    end
  end
  # Hardcoded start position: always begin reading from the stream start.
  defp restore_stream_position!, do: -1
  # Appends one "kelvin_test_event" per number in `range`; the event data
  # is the number rendered as a string so tests can assert ordering.
  defp write_events(range, stream) do
    range
    |> Enum.map(fn n ->
      Messages.NewEvent.new(
        event_id: Extreme.Tools.generate_uuid(),
        event_type: "kelvin_test_event",
        data_content_type: 1,
        metadata_content_type: 1,
        # valid JSON
        data: to_string(n),
        metadata: "{}"
      )
    end)
    |> ExtremeClient.append_events(stream)
  end
end
| 27.598485 | 77 | 0.629426 |
ffb6ab1fcfd0aab396738300679266e1cda341ba | 138 | exs | Elixir | test/fixtures/parser/disable_migration_lock_true.exs | maximemenager/strong_migrations | b7e091d2cfed73098d3bf683c7ce5c8ceee3159b | [
"MIT"
] | 23 | 2021-10-29T19:58:35.000Z | 2021-11-13T21:42:45.000Z | test/fixtures/parser/disable_migration_lock_true.exs | maximemenager/strong_migrations | b7e091d2cfed73098d3bf683c7ce5c8ceee3159b | [
"MIT"
] | 1 | 2021-10-31T03:57:47.000Z | 2021-10-31T14:33:45.000Z | test/fixtures/parser/disable_migration_lock_true.exs | surgeventures/strong_migrations | 3c82e34a6e7a372c6de17ba7a0b07da7664baa26 | [
"MIT"
] | 3 | 2021-10-31T02:10:48.000Z | 2021-11-09T08:07:22.000Z | defmodule DisableMigrationLockTrue do
  # Parser fixture: a migration with @disable_migration_lock set to true
  # and an intentionally empty change/0.
  @moduledoc false
  use Ecto.Migration
  @disable_migration_lock true
  def change do
  end
end
| 12.545455 | 37 | 0.782609 |
ffb6dda398346f37b462053b8a384cbbfaa4cd80 | 15,018 | exs | Elixir | test/exq_test.exs | onpointvn/exq | ee4db22624fb8a00ddc01c35a135e24de31b5a66 | [
"Apache-2.0"
] | null | null | null | test/exq_test.exs | onpointvn/exq | ee4db22624fb8a00ddc01c35a135e24de31b5a66 | [
"Apache-2.0"
] | null | null | null | test/exq_test.exs | onpointvn/exq | ee4db22624fb8a00ddc01c35a135e24de31b5a66 | [
"Apache-2.0"
] | null | null | null | defmodule ExqTest do
use ExUnit.Case
alias Exq.Redis.JobQueue
alias Exq.Redis.JobStat
alias Exq.Support.Node
import ExqTestUtil
  # Job modules used by the tests below; each reports back to the test
  # process registered under :exqtest.
  defmodule PerformWorker do
    # Minimal job: signals completion.
    def perform do
      send(:exqtest, {:worked})
    end
  end
  defmodule PerformArgWorker do
    # Echoes its single argument back to the test process.
    def perform(arg) do
      send(:exqtest, {:worked, arg})
    end
  end
  defmodule SleepWorker do
    # Sleeps `time` ms then reports `message` - used for throttle tests.
    def perform(time, message) do
      :timer.sleep(time)
      send(:exqtest, {message})
    end
  end
  defmodule SleepLastWorker do
    # Registers itself, reports first, then sleeps - lets tests observe
    # a job while it is still running.
    def perform(time, message) do
      Process.register(self(), :sleep_last_worker)
      send(:exqtest, {message})
      :timer.sleep(time)
    end
  end
  defmodule EmptyMethodWorker do
    # Job that does nothing and succeeds.
    def perform do
    end
  end
  defmodule MissingMethodWorker do
    # Deliberately has no perform function.
  end
  defmodule FailWorker do
    # Fails with an arithmetic error before the send is reached.
    def failure_perform do
      _ = :num + 1
      send(:exqtest, {:worked})
    end
  end
  setup do
    # Fresh Redis state per test; teardown waits for in-flight work first.
    TestRedis.setup()
    on_exit(fn ->
      wait()
      TestRedis.teardown()
    end)
    :ok
  end
  test "start using registered name" do
    {:ok, exq_sup} = Exq.start_link(name: CustomManager)
    assert_exq_up(CustomManager)
    stop_process(exq_sup)
  end
  test "start multiple exq instances using registered name" do
    # Two independently named instances must be able to coexist.
    {:ok, sup1} = Exq.start_link(name: CustomManager1)
    assert_exq_up(CustomManager1)
    {:ok, sup2} = Exq.start_link(name: CustomManager2)
    assert_exq_up(CustomManager2)
    stop_process(sup1)
    stop_process(sup2)
  end
  test "enqueue and run job" do
    # Register self so PerformWorker can message us on completion.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link()
    {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.PerformWorker, [])
    assert_receive {:worked}
    stop_process(sup)
  end
  test "enqueue and run job via redis sentinel" do
    # Point the redis client at a local sentinel (port 6666, group "exq")
    # for the duration of this test only.
    sentinel_args = [
      sentinel: [sentinels: [[host: "127.0.0.1", port: 6666]], group: "exq"],
      database: 0,
      password: nil,
      timeout: 5000,
      name: Exq.Redis.Client,
      socket_opts: []
    ]
    with_application_env(:exq, :redis_options, sentinel_args, fn ->
      Process.register(self(), :exqtest)
      {:ok, sup} = Exq.start_link()
      {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.PerformWorker, [])
      assert_receive {:worked}
      stop_process(sup)
    end)
  end
  test "run jobs from backup queue on boot" do
    host = elem(:inet.gethostname(), 1)
    Process.register(self(), :exqtest)
    # enqueue and dequeue - this should now be in backup queue
    JobQueue.enqueue(:testredis, "test", "queue", ExqTest.PerformWorker, [], [])
    JobQueue.dequeue(:testredis, "test", host, ["queue"])
    # make sure jobs were requeued from backup queue
    {:ok, sup} = Exq.start_link(queues: ["default", "queue"])
    wait_long()
    assert_received {:worked}
    # make sure backup queue was cleared properly if job finished
    JobQueue.re_enqueue_backup(:testredis, "test", host, "queue")
    wait_long()
    refute_received {:worked}
    stop_process(sup)
  end
  test "enqueue_in and run a job" do
    # A zero-second delay should run immediately once the scheduler picks it up.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(scheduler_enable: true)
    {:ok, _} = Exq.enqueue_in(Exq, "default", 0, ExqTest.PerformWorker, [])
    assert_receive {:worked}
    stop_process(sup)
  end
  test "enqueue_at and run a job" do
    # Scheduling at "now" should also run immediately.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(scheduler_enable: true)
    {:ok, _} = Exq.enqueue_at(Exq, "default", DateTime.utc_now(), ExqTest.PerformWorker, [])
    assert_receive {:worked}
    stop_process(sup)
  end
  test "enqueue with separate enqueuer" do
    # A :enqueuer-mode instance only enqueues; the default instance runs the job.
    Process.register(self(), :exqtest)
    {:ok, exq_sup} = Exq.start_link()
    {:ok, enq_sup} = Exq.start_link(mode: :enqueuer, name: ExqE)
    {:ok, _} = Exq.Enqueuer.enqueue(ExqE.Enqueuer, "default", ExqTest.PerformWorker, [])
    assert_receive {:worked}
    stop_process(exq_sup)
    stop_process(enq_sup)
  end
  test "enqueue with separate enqueuer and api" do
    # Combined [:enqueuer, :api] mode exposes both sub-servers.
    Process.register(self(), :exqtest)
    {:ok, exq_sup} = Exq.start_link()
    {:ok, enq_sup} = Exq.start_link(mode: [:enqueuer, :api], name: ExqE)
    {:ok, _} = Exq.Enqueuer.enqueue(ExqE.Enqueuer, "default", ExqTest.PerformWorker, [])
    {:ok, _} = Exq.Api.queues(ExqE.Api)
    assert_receive {:worked}
    stop_process(exq_sup)
    stop_process(enq_sup)
  end
  test "enqueue with separate enqueuer even if main Exq process is down" do
    # Enqueued while no worker instance runs; the job completes once one boots.
    Process.register(self(), :exqtest)
    {:ok, exq_sup} = Exq.start_link()
    stop_process(exq_sup)
    {:ok, enq_sup} = Exq.start_link(mode: :enqueuer)
    {:ok, _} = Exq.Enqueuer.enqueue(Exq.Enqueuer, "default", ExqTest.PerformWorker, [])
    stop_process(enq_sup)
    {:ok, exq_sup} = Exq.start_link()
    assert_receive {:worked}
    stop_process(exq_sup)
  end
  test "run jobs on multiple queues" do
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1", "q2"])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.PerformArgWorker, [1])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.PerformArgWorker, [2])
    assert_receive {:worked, 1}
    assert_receive {:worked, 2}
    stop_process(sup)
  end
  test "register queue and run job" do
    # Subscribing to "q2" at runtime makes its jobs run alongside "q1".
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1"])
    :ok = Exq.subscribe(Exq, "q2", 10)
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.PerformArgWorker, [1])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.PerformArgWorker, [2])
    assert_receive {:worked, 1}
    assert_receive {:worked, 2}
    stop_process(sup)
  end
  test "unregister queue and run job" do
    # After unsubscribing, jobs on that queue must not be processed.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1", "to_remove"])
    :ok = Exq.unsubscribe(Exq, "to_remove")
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.PerformArgWorker, [1])
    {:ok, _} = Exq.enqueue(Exq, "to_remove", ExqTest.PerformArgWorker, [2])
    assert_receive {:worked, 1}
    refute_receive {:worked, 2}
    stop_process(sup)
  end
  test "unregister all queues and run jobs" do
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1", "q2"])
    :ok = Exq.unsubscribe_all(Exq)
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.PerformArgWorker, [1])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.PerformArgWorker, [2])
    refute_receive {:worked, 1}
    refute_receive {:worked, 2}
    stop_process(sup)
  end
  test "subscriptions when empty" do
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: [])
    assert {:ok, []} = Exq.subscriptions(Exq)
    stop_process(sup)
  end
  test "subscriptions when present" do
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1", "q2"])
    assert {:ok, ["q1", "q2"]} = Exq.subscriptions(Exq)
    stop_process(sup)
  end
  test "subscriptions after a new registration" do
    # Newly subscribed queues are listed first.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1"])
    :ok = Exq.subscribe(Exq, "q2")
    assert {:ok, ["q2", "q1"]} = Exq.subscriptions(Exq)
    stop_process(sup)
  end
  test "subscriptions after an unregistration" do
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: ["q1", "to_unsubscribe"])
    :ok = Exq.unsubscribe(Exq, "to_unsubscribe")
    assert {:ok, ["q1"]} = Exq.subscriptions(Exq)
    stop_process(sup)
  end
  test "throttle workers per queue" do
    # concurrency: 1 means q1 runs jobs serially: 40ms + 40ms fit inside
    # the 160ms window, the 100ms third job must not have finished yet.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(concurrency: 1, queues: ["q1", "q2"])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [40, :worked])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [40, :worked2])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [100, :finished])
    # q2 should be clear
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.SleepWorker, [100, :q2_finished])
    # Timing specific - we want to ensure only x amount of jobs got done
    :timer.sleep(160)
    assert_received {"worked"}
    assert_received {"worked2"}
    refute_received {"finished"}
    assert_received {"q2_finished"}
    stop_process(sup)
  end
  test "throttle workers different concurrency per queue" do
    # Per-queue concurrency: q1 limited to 1 worker, q2 allows 20 so all
    # four q2 jobs run in parallel within the 150ms window.
    Process.register(self(), :exqtest)
    {:ok, sup} = Exq.start_link(queues: [{"q1", 1}, {"q2", 20}])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [40, :worked])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [40, :worked2])
    {:ok, _} = Exq.enqueue(Exq, "q1", ExqTest.SleepWorker, [100, :should_not_finish])
    # q2 should be clear
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.SleepWorker, [100, :q2_work])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.SleepWorker, [100, :q2_work])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.SleepWorker, [100, :q2_work])
    {:ok, _} = Exq.enqueue(Exq, "q2", ExqTest.SleepWorker, [100, :q2_finished])
    :timer.sleep(150)
    assert_received {"worked"}
    assert_received {"worked2"}
    refute_received {"should_not_finish"}
    assert_received {"q2_finished"}
    stop_process(sup)
  end
# In-progress process bookkeeping: while a worker runs, an entry exists in
# the Redis process set; it must be gone once the job finishes, and host
# stats can be cleaned up explicitly (as done on boot).

test "record processes" do
  Process.register(self(), :exqtest)
  {:ok, sup} = Exq.start_link(name: ExqP)
  state = :sys.get_state(ExqP)
  host = Exq.NodeIdentifier.HostnameIdentifier.node_id()
  JobStat.node_ping(:testredis, "test", %Node{identity: host, busy: 1})
  {:ok, _} = Exq.enqueue(ExqP, "default", ExqTest.SleepWorker, [100, "finished"])
  wait_long()
  # Check that process has been recorded
  processes = Exq.Redis.JobStat.processes(state.redis, "test")
  assert Enum.count(processes) == 1
  wait_long()
  assert_received {"finished"}
  # Check that process has been cleared
  processes = Exq.Redis.JobStat.processes(state.redis, "test")
  assert Enum.count(processes) == 0
  {:ok, _} = Exq.enqueue(ExqP, "default", ExqTest.InvalidWorker, [100, "finished"])
  wait_long()
  # InvalidWorker cannot run, so no in-progress process entry remains
  processes = Exq.Redis.JobStat.processes(state.redis, "test")
  assert Enum.count(processes) == 0
  stop_process(sup)
end

test "clear processes on boot" do
  Process.register(self(), :exqtest)
  {:ok, sup} = Exq.start_link(name: ExqP)
  state = :sys.get_state(ExqP)
  host = Exq.NodeIdentifier.HostnameIdentifier.node_id()
  JobStat.node_ping(:testredis, "test", %Node{identity: host, busy: 1})
  # Long-running worker keeps its process entry alive during the test.
  {:ok, _} = Exq.enqueue(ExqP, "default", ExqTest.SleepLastWorker, [1000, "started"])
  wait_long()
  assert_received {"started"}
  # Check that process has been recorded
  processes = Exq.Redis.JobStat.processes(state.redis, "test")
  assert Enum.count(processes) == 1
  # Clear processes for this node
  Exq.Stats.Server.cleanup_host_stats(ExqP.Stats, "test", host)
  # Check that process has been cleared
  processes = Exq.Redis.JobStat.processes(state.redis, "test")
  assert Enum.count(processes) == 0
  stop_process(sup)
end
# Counter bookkeeping: successful jobs increment the "processed" stat and
# failing jobs increment the "failed" stat in Redis (counts come back as
# strings from Redis).

test "record processed jobs" do
  {:ok, sup} = Exq.start_link(name: ExqP)
  state = :sys.get_state(ExqP)
  {:ok, _} = Exq.enqueue(ExqP, "default", ExqTest.EmptyMethodWorker, [])
  wait_long()
  {:ok, count} = TestStats.processed_count(state.redis, "test")
  assert count == "1"
  {:ok, _} = Exq.enqueue(ExqP, "default", ExqTest.EmptyMethodWorker, [])
  wait_long()
  {:ok, count} = TestStats.processed_count(state.redis, "test")
  assert count == "2"
  stop_process(sup)
end

test "record failed jobs" do
  {:ok, sup} = Exq.start_link()
  state = :sys.get_state(Exq)
  {:ok, _} = Exq.enqueue(Exq, "default", "ExqTest.MissingMethodWorker/fail", [])
  wait_long()
  {:ok, count} = TestStats.failed_count(state.redis, "test")
  assert count == "1"
  {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.MissingWorker, [])
  wait_long()
  {:ok, count} = TestStats.failed_count(state.redis, "test")
  assert count == "2"
  {:ok, jid} = Exq.enqueue(Exq, "default", "ExqTest.FailWorker/failure_perform", [])
  # if we kill Exq too fast we dont record the failure because exq is gone
  wait_long()
  stop_process(sup)
  {:ok, sup} = Exq.start_link(mode: :api)
  # Find the job in the failed queue via an API-only instance
  {:ok, _} = Exq.Api.find_failed(Exq.Api, jid)
  wait_long()
  stop_process(sup)
end
# Graceful shutdown: stopping the supervisor waits for in-flight jobs, but
# only up to the configured :shutdown_timeout.

test "waiting for workers to finish" do
  Process.register(self(), :exqtest)
  {:ok, sup} = Exq.start_link([])
  {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.SleepWorker, [100, :one])
  {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.SleepWorker, [100, :two])
  wait()
  stop_process(sup)
  # Both jobs had time to finish before shutdown completed.
  assert_received {"one"}
  assert_received {"two"}
end

test "configure worker shutdown time" do
  Process.register(self(), :exqtest)
  {:ok, sup} = Exq.start_link(shutdown_timeout: 200)
  {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.SleepWorker, [500, :long])
  {:ok, _} = Exq.enqueue(Exq, "default", ExqTest.SleepWorker, [100, :short])
  wait()
  stop_process(sup)
  # The 500ms job exceeds the 200ms shutdown budget and never reports back.
  refute_received {"long"}
  assert_received {"short"}
end
# Stopping the supervisor must take down every named server in the tree
# and leave no stale process stats behind for the next boot.
test "handle supervisor tree shutdown properly with stats cleanup" do
  Process.register(self(), :exqtest)
  {:ok, sup} = Exq.start_link()
  # call worker that sends message and sleeps for a bit
  {:ok, _jid} = Exq.enqueue(Exq, "default", ExqTest.SleepLastWorker, [300, "worked"])
  # wait until worker started
  assert_receive {"worked"}, 100
  stop_process(sup)
  # Make sure everything is shut down properly
  assert Process.alive?(sup) == false
  assert Process.whereis(Exq.Manager.Server) == nil
  assert Process.whereis(Exq.Stats.Server) == nil
  assert Process.whereis(Exq.Scheduler.Server) == nil
  assert Process.whereis(:sleep_last_worker) == nil
  # Check that stats were cleaned up
  {:ok, sup} = Exq.start_link()
  assert {:ok, []} == Exq.Api.processes(Exq.Api)
  stop_process(sup)
end
# Dead queue: jobs enqueued with max_retries: 0 land in the failed set,
# which is trimmed by :dead_max_jobs (by size; 0 keeps nothing) and by
# :dead_timeout_in_seconds (by age).

test "move to dead queue" do
  {:ok, sup} = Exq.start_link()
  enqueue_fail_job(10)
  assert JobQueue.failed_size(:testredis, "test") == 10
  stop_process(sup)
end

test "trim dead queue by size" do
  {:ok, sup} = Exq.start_link()
  with_application_env(:exq, :dead_max_jobs, 5, fn ->
    enqueue_fail_job(10)
  end)
  # Only the configured maximum survives the trim.
  assert JobQueue.failed_size(:testredis, "test") == 5
  stop_process(sup)
end

test "dead queue can be disabled" do
  {:ok, sup} = Exq.start_link()
  with_application_env(:exq, :dead_max_jobs, 0, fn ->
    enqueue_fail_job(10)
  end)
  assert JobQueue.failed_size(:testredis, "test") == 0
  stop_process(sup)
end

test "trim dead queue by timeout" do
  {:ok, sup} = Exq.start_link()
  with_application_env(:exq, :dead_timeout_in_seconds, 1, fn ->
    enqueue_fail_job(10)
    assert JobQueue.failed_size(:testredis, "test") == 10
    :timer.sleep(1000)
    enqueue_fail_job(1)
  end)
  # Only the job enqueued after the timeout window survives.
  assert JobQueue.failed_size(:testredis, "test") == 1
  stop_process(sup)
end
# Enqueues `count` jobs that fail immediately (max_retries: 0) so they land
# in the dead/failed set, then waits for them to be processed.
#
# Guarded iteration instead of `for _ <- 0..(count - 1)`: for count == 0
# that expression builds the *descending* range `0..-1`, which iterates two
# elements and would enqueue two jobs instead of none.
defp enqueue_fail_job(count) do
  if count > 0 do
    Enum.each(1..count, fn _ ->
      {:ok, _} =
        Exq.enqueue(Exq, "default", "ExqTest.MissingMethodWorker/fail", [], max_retries: 0)
    end)
  end

  wait_long()
end
end
| 29.976048 | 92 | 0.647756 |
ffb6e10c19b60f8daefeafff148dc2c9b5a7abe8 | 2,045 | exs | Elixir | config/prod.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | null | null | null | config/prod.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | 2 | 2021-03-16T06:43:04.000Z | 2021-03-16T06:54:55.000Z | config/prod.exs | gissandrogama/contracts_api | 13bcd292637d0e2bc4d2a6c05f5b3266e8bf28e1 | [
"MIT"
] | null | null | null | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :contracts_api, ContractsApiWeb.Endpoint,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :contracts_api, ContractsApiWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :contracts_api, ContractsApiWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 36.517857 | 66 | 0.719315 |
ffb6ebd096418b7fef26486a7ba0a4b6898182b2 | 1,980 | exs | Elixir | config/config.exs | bglusman/elixir-companies | d6a728cf9136888367e261feb876c5d2a266ed57 | [
"MIT"
] | null | null | null | config/config.exs | bglusman/elixir-companies | d6a728cf9136888367e261feb876c5d2a266ed57 | [
"MIT"
] | null | null | null | config/config.exs | bglusman/elixir-companies | d6a728cf9136888367e261feb876c5d2a266ed57 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :companies, ecto_repos: [Companies.Repo]
# Configures the endpoint
config :companies, CompaniesWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "cnFp+p3HcWa0ZaS5YhEfuJlU2PIxvUinNThsTSXm4ZE2M7D/zYzpfIJGMVNLHtqv",
render_errors: [view: CompaniesWeb.ErrorView, accepts: ~w(html json)],
pubsub_server: Companies.PubSub,
live_view: [signing_salt: "IJL0bF+zIE2Ax4MFSi16HqrurNFhiYlD"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix and Ecto
config :phoenix, :json_library, Jason
config :companies,
notifier: Notify.Console,
site_data: %{
name: "Elixir Companies"
},
results_per_page: 16
config :oauth2,
serializers: %{
"application/json" => Jason
}
config :ueberauth, Ueberauth,
providers: [
github: {Ueberauth.Strategy.Github, [default_scope: "user:email", send_redirect_uri: false]}
]
config :ueberauth, Ueberauth.Strategy.Github.OAuth,
client_id: System.get_env("GITHUB_CLIENT_ID"),
client_secret: System.get_env("GITHUB_CLIENT_SECRET")
config :scrivener_html,
routes_helper: CompaniesWeb.Router.Helpers,
view_style: :bulma
config :phoenix, :template_engines,
eex: Appsignal.Phoenix.Template.EExEngine,
exs: Appsignal.Phoenix.Template.ExsEngine
config :companies, Notify.Mailer, adapter: Bamboo.LocalAdapter
config :live_dashboard_history, LiveDashboardHistory,
router: CompaniesWeb.Router,
metrics: CompaniesWeb.Telemetry,
buffer_size: 500
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 29.552239 | 96 | 0.769697 |
ffb70e41571e06479529de58df4c02a25378aa33 | 2,498 | ex | Elixir | lib/logger_json/formatters/basic_logger.ex | coingaming/logger_json | 05000f70e6964fdf37f3847a046639d3a8587598 | [
"MIT"
] | null | null | null | lib/logger_json/formatters/basic_logger.ex | coingaming/logger_json | 05000f70e6964fdf37f3847a046639d3a8587598 | [
"MIT"
] | null | null | null | lib/logger_json/formatters/basic_logger.ex | coingaming/logger_json | 05000f70e6964fdf37f3847a046639d3a8587598 | [
"MIT"
] | 1 | 2021-07-02T15:54:21.000Z | 2021-07-02T15:54:21.000Z | defmodule LoggerJSON.Formatters.BasicLogger do
@moduledoc """
Basic JSON log formatter with no vendor-specific formatting
"""
import Jason.Helpers, only: [json_map: 1]
alias LoggerJSON.FormatterUtils
@behaviour LoggerJSON.Formatter
# @processed_metadata_keys ~w[pid file line function module application]a
@impl true
# Logger formatter callback: builds the flat JSON document for one event.
#
# `level` is the Logger level atom, `msg` the message iodata, `ts` the
# timestamp as given by Logger (rendered via FormatterUtils), `md` the
# metadata, and `md_keys` the metadata whitelist. Returns a `json_map/1`
# fragment with `time`, `severity`, `message` and `metadata` keys.
def format_event(level, msg, ts, md, md_keys) do
  json_map(
    time: FormatterUtils.format_timestamp(ts),
    severity: Atom.to_string(level),
    message: format_message(msg),
    metadata: format_metadata(md, md_keys)
  )
end
# Renders the log message iodata as a binary; when the flattened bytes are
# not valid printable UTF-8, fall back to `inspect/1` output so the JSON
# stays encodable.
defp format_message(msg) do
  binary = IO.iodata_to_binary(msg)
  if String.valid?(binary) and String.printable?(binary), do: binary, else: inspect(binary)
end
# Filters metadata down to `md_keys`, recursively converts the values into
# JSON-encodable terms, and attaches process-crash info under `:error`
# when FormatterUtils detects one.
defp format_metadata(md, md_keys) do
  md
  # |> LoggerJSON.take_metadata(md_keys, @processed_metadata_keys)
  |> LoggerJSON.take_metadata(md_keys)
  |> format_data()
  |> FormatterUtils.maybe_put(:error, FormatterUtils.format_process_crash(md))
end
# Recursively converts arbitrary terms into JSON-encodable ones.
# Clause order matters: fragments and calendar structs must be handled
# before the generic struct clause.

# Pre-encoded JSON passes through untouched.
defp format_data(%Jason.Fragment{} = data) do
  data
end

defp format_data(%DateTime{} = data) do
  DateTime.to_iso8601(data)
end

defp format_data(%Date{} = data) do
  Date.to_iso8601(data)
end

# Generic struct: format every field in place; keep the struct only when
# `mod` has a Jason.Encoder implementation (per jason_implemented?/1),
# otherwise downgrade it to a plain map.
defp format_data(%mod{} = data) do
  new_data =
    data
    |> Map.from_struct()
    |> Map.keys()
    |> Enum.reduce(data, fn key, acc ->
      Map.put(acc, key, format_data(Map.get(data, key)))
    end)

  if jason_implemented?(mod) do
    new_data
  else
    Map.from_struct(new_data)
  end
end

# Plain map: format every value in place.
defp format_data(%{} = data) do
  data
  |> Map.keys()
  |> Enum.reduce(data, fn key, acc ->
    Map.put(acc, key, format_data(Map.get(data, key)))
  end)
end

# A two-tuple with a binary/atom key becomes a single-entry map.
defp format_data({key, data}) when is_binary(key) or is_atom(key) do
  %{key => format_data(data)}
end

# Terms without a natural JSON representation (including lists and
# non-binary bitstrings) are rendered via inspect/2.
defp format_data(data)
     when is_list(data) or is_tuple(data) or is_reference(data) or is_port(data) or is_pid(data) or is_function(data) or
          (is_bitstring(data) and not is_binary(data)) do
  inspect(data, pretty: true, width: 70)
end

# Binaries stay as-is only when they are printable UTF-8.
defp format_data(data) when is_binary(data) do
  if String.valid?(data) && String.printable?(data) do
    data
  else
    inspect(data)
  end
end

# Numbers, atoms, booleans, nil: already encodable.
defp format_data(data), do: data
# Returns true when `mod` has a `Jason.Encoder` implementation;
# `Protocol.assert_impl!/2` returns :ok in that case and raises
# `ArgumentError` otherwise, which we translate to false.
def jason_implemented?(mod) do
  Protocol.assert_impl!(Jason.Encoder, mod)
  true
rescue
  ArgumentError -> false
end
end
| 22.709091 | 122 | 0.649319 |
ffb70e6d0140f5e73c177d5724c40fac81442e87 | 34,681 | ex | Elixir | lib/ecto/adapters/postgres/connection.ex | mbuhot/ecto | e6c4c7df0af055ba4bae8737908b98ae85352d2f | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/postgres/connection.ex | mbuhot/ecto | e6c4c7df0af055ba4bae8737908b98ae85352d2f | [
"Apache-2.0"
] | null | null | null | lib/ecto/adapters/postgres/connection.ex | mbuhot/ecto | e6c4c7df0af055ba4bae8737908b98ae85352d2f | [
"Apache-2.0"
] | null | null | null | if Code.ensure_loaded?(Postgrex) do
Postgrex.Types.define(Ecto.Adapters.Postgres.TypeModule,
Ecto.Adapters.Postgres.extensions(),
json: Application.get_env(:ecto, :json_library, Poison))
defmodule Ecto.Adapters.Postgres.Connection do
@moduledoc false

# Default PostgreSQL TCP port used when the caller supplies none.
@default_port 5432
@behaviour Ecto.Adapters.SQL.Connection

## Module and Options

# Returns the Postgrex child spec for this adapter, defaulting the port
# and the compiled extensions type module unless the caller overrides them.
def child_spec(opts) do
  opts
  |> Keyword.put_new(:port, @default_port)
  |> Keyword.put_new(:types, Ecto.Adapters.Postgres.TypeModule)
  |> Postgrex.child_spec()
end
# Maps Postgrex constraint-violation errors to Ecto's constraint keyword
# format (e.g. `[unique: "index_name"]`).

def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, constraint: constraint}}),
  do: [unique: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, constraint: constraint}}),
  do: [foreign_key: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, constraint: constraint}}),
  do: [exclude: constraint]
def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, constraint: constraint}}),
  do: [check: constraint]

# Postgres 9.2 and earlier does not provide the constraint field, so fall
# back to parsing the quoted constraint name out of the error message.
def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, message: message}}) do
  case :binary.split(message, " unique constraint ") do
    [_, quoted] -> [unique: strip_quotes(quoted)]
    _ -> []
  end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, message: message}}) do
  case :binary.split(message, " foreign key constraint ") do
    [_, quoted] ->
      # Drop the trailing ` on table "..."` portion before unquoting.
      [quoted | _] = :binary.split(quoted, " on table ")
      [foreign_key: strip_quotes(quoted)]
    _ ->
      []
  end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, message: message}}) do
  case :binary.split(message, " exclusion constraint ") do
    [_, quoted] -> [exclude: strip_quotes(quoted)]
    _ -> []
  end
end
def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, message: message}}) do
  case :binary.split(message, " check constraint ") do
    [_, quoted] -> [check: strip_quotes(quoted)]
    _ -> []
  end
end

# Any other Postgrex error carries no constraint information.
def to_constraints(%Postgrex.Error{}),
  do: []
# Drops the first and last byte (the surrounding double quotes) of a
# Postgres-quoted identifier, e.g. `"posts_pkey"` -> `posts_pkey`.
defp strip_quotes(quoted) do
  binary_part(quoted, 1, byte_size(quoted) - 2)
end
## Query
# Prepares the statement under `name` and executes it in a single call.
# Postgrex errors come back as `{:error, %Postgrex.Error{}}` so the caller
# can translate them; any other DBConnection error is raised.
def prepare_execute(conn, name, sql, params, opts) do
  query = %Postgrex.Query{name: name, statement: sql}
  opts = [function: :prepare_execute] ++ opts
  case DBConnection.prepare_execute(conn, query, params, opts) do
    {:ok, _, _} = ok ->
      ok
    {:error, %Postgrex.Error{}} = error ->
      error
    {:error, err} ->
      raise err
  end
end

# Executes raw SQL (binary or iodata) as an unnamed statement, discarding
# the prepared query from the result.
def execute(conn, sql, params, opts) when is_binary(sql) or is_list(sql) do
  query = %Postgrex.Query{name: "", statement: sql}
  opts = [function: :prepare_execute] ++ opts
  case DBConnection.prepare_execute(conn, query, params, opts) do
    {:ok, _, result} ->
      {:ok, result}
    {:error, %Postgrex.Error{}} = error ->
      error
    {:error, err} ->
      raise err
  end
end

# Executes an already-prepared query struct. Errors that indicate the
# cached prepared query can no longer be used (ArgumentError or Postgres
# :feature_not_supported) are surfaced as `{:reset, err}`.
def execute(conn, %{} = query, params, opts) do
  opts = [function: :execute] ++ opts
  case DBConnection.execute(conn, query, params, opts) do
    {:ok, _} = ok ->
      ok
    {:error, %ArgumentError{} = err} ->
      {:reset, err}
    {:error, %Postgrex.Error{postgres: %{code: :feature_not_supported}} = err} ->
      {:reset, err}
    {:error, %Postgrex.Error{}} = error ->
      error
    {:error, err} ->
      raise err
  end
end

# Lazy result stream; delegates directly to Postgrex.stream/4.
def stream(conn, sql, params, opts) do
  Postgrex.stream(conn, sql, params, opts)
end
alias Ecto.Query
alias Ecto.Query.{BooleanExpr, JoinExpr, QueryExpr}
# Builds the SELECT statement for an Ecto.Query. Fragment assembly order
# mirrors SQL syntax: SELECT .. FROM .. JOIN .. WHERE .. GROUP BY ..
# HAVING .. ORDER BY .. LIMIT .. OFFSET .. [lock clause].
def all(query) do
  sources = create_names(query)
  {select_distinct, order_by_distinct} = distinct(query.distinct, sources, query)

  from = from(query, sources)
  select = select(query, select_distinct, sources)
  join = join(query, sources)
  where = where(query, sources)
  group_by = group_by(query, sources)
  having = having(query, sources)
  order_by = order_by(query, order_by_distinct, sources)
  limit = limit(query, sources)
  offset = offset(query, sources)
  lock = lock(query.lock)

  IO.iodata_to_binary([select, from, join, where, group_by, having, order_by, limit, offset | lock])
end

# UPDATE .. SET statement. `prefix` lets the ON CONFLICT path reuse this
# builder with "UPDATE SET " instead of a full UPDATE head (see
# on_conflict/2 below).
def update_all(%{from: from} = query, prefix \\ nil) do
  sources = create_names(query)
  {from, name} = get_source(query, sources, 0, from)

  prefix = prefix || ["UPDATE ", from, " AS ", name | " SET "]
  fields = update_fields(query, sources)
  # Joins become FROM entries; their ON conditions move into WHERE.
  {join, wheres} = using_join(query, :update_all, "FROM", sources)
  where = where(%{query | wheres: wheres ++ query.wheres}, sources)

  IO.iodata_to_binary([prefix, fields, join, where | returning(query, sources)])
end

# DELETE statement; joins are expressed via USING plus extra WHERE clauses.
def delete_all(%{from: from} = query) do
  sources = create_names(query)
  {from, name} = get_source(query, sources, 0, from)

  {join, wheres} = using_join(query, :delete_all, "USING", sources)
  where = where(%{query | wheres: wheres ++ query.wheres}, sources)

  IO.iodata_to_binary(["DELETE FROM ", from, " AS ", name, join, where | returning(query, sources)])
end
# INSERT statement with optional ON CONFLICT handling and RETURNING list.
# A `nil` cell in `rows` renders as DEFAULT; an empty `header` inserts
# all-default rows.
#
# FIX: the `&quote_name/1` captures below (here and in conflict_target/1)
# had been corrupted to `"e_name/1` by HTML-entity decoding (`&quot` -> `"`),
# which is a syntax error; restored the original capture syntax.
def insert(prefix, table, header, rows, on_conflict, returning) do
  values =
    if header == [] do
      [" VALUES " | intersperse_map(rows, ?,, fn _ -> "(DEFAULT)" end)]
    else
      [?\s, ?(, intersperse_map(header, ?,, &quote_name/1), ") VALUES " | insert_all(rows, 1)]
    end

  IO.iodata_to_binary(["INSERT INTO ", quote_table(prefix, table), insert_as(on_conflict),
                       values, on_conflict(on_conflict, header) | returning(returning)])
end

# When :on_conflict carries a query, alias the insert target so the
# conflict query can reference it.
defp insert_as({%{from: from} = query, _, _}) do
  {_, name} = get_source(%{query | joins: []}, create_names(query), 0, from)
  [" AS " | name]
end
defp insert_as({_, _, _}) do
  []
end

defp on_conflict({:raise, _, []}, _header),
  do: []
defp on_conflict({:nothing, _, targets}, _header),
  do: [" ON CONFLICT ", conflict_target(targets) | "DO NOTHING"]
defp on_conflict({:replace_all, _, targets}, header),
  do: [" ON CONFLICT ", conflict_target(targets), "DO " | replace_all(header)]
defp on_conflict({query, _, targets}, _header),
  do: [" ON CONFLICT ", conflict_target(targets), "DO " | update_all(query, "UPDATE SET ")]

defp conflict_target([]),
  do: []
defp conflict_target(targets),
  do: [?(, intersperse_map(targets, ?,, &quote_name/1), ?), ?\s]

# :replace_all updates every inserted column from the EXCLUDED row.
defp replace_all(header) do
  ["UPDATE SET " |
   intersperse_map(header, ?,, fn field ->
     quoted = quote_name(field)
     [quoted, " = ", "EXCLUDED." | quoted]
   end)]
end

# Renders all VALUES tuples, numbering $ placeholders from `counter`.
defp insert_all(rows, counter) do
  intersperse_reduce(rows, ?,, counter, fn row, counter ->
    {row, counter} = insert_each(row, counter)
    {[?(, row, ?)], counter}
  end)
  |> elem(0)
end

defp insert_each(values, counter) do
  intersperse_reduce(values, ?,, counter, fn
    nil, counter ->
      {"DEFAULT", counter}
    _, counter ->
      {[?$ | Integer.to_string(counter)], counter + 1}
  end)
end

# Single-row UPDATE by filters, with consecutively numbered placeholders
# spanning SET fields and WHERE filters.
def update(prefix, table, fields, filters, returning) do
  {fields, count} = intersperse_reduce(fields, ", ", 1, fn field, acc ->
    {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
  end)

  {filters, _count} = intersperse_reduce(filters, " AND ", count, fn field, acc ->
    {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
  end)

  IO.iodata_to_binary(["UPDATE ", quote_table(prefix, table), " SET ",
                       fields, " WHERE ", filters | returning(returning)])
end

# Single-row DELETE by filters.
def delete(prefix, table, filters, returning) do
  {filters, _} = intersperse_reduce(filters, " AND ", 1, fn field, acc ->
    {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1}
  end)

  IO.iodata_to_binary(["DELETE FROM ", quote_table(prefix, table), " WHERE ",
                       filters | returning(returning)])
end
## Query generation
# Operators rendered infix with the given SQL text; everything else is
# rendered as a function call by the fallback clause.
binary_ops =
  [==: " = ", !=: " != ", <=: " <= ", >=: " >= ", <: " < ", >: " > ",
   and: " AND ", or: " OR ", ilike: " ILIKE ", like: " LIKE "]

@binary_ops Keyword.keys(binary_ops)

# Generate one handle_call/2 clause per binary operator at compile time.
Enum.map(binary_ops, fn {op, str} ->
  defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)}
end)

defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)}
# SELECT clause; `select_distinct` is the (possibly empty) DISTINCT prefix
# computed by distinct/3.
defp select(%Query{select: %{fields: fields}} = query, select_distinct, sources) do
  ["SELECT", select_distinct, ?\s | select_fields(fields, sources, query)]
end

# An empty field list selects the constant TRUE.
defp select_fields([], _sources, _query),
  do: "TRUE"
defp select_fields(fields, sources, query) do
  intersperse_map(fields, ", ", fn
    {key, value} ->
      [expr(value, sources, query), " AS " | quote_name(key)]
    value ->
      expr(value, sources, query)
  end)
end

# Returns {distinct_prefix, distinct_exprs}; the expressions are also fed
# into ORDER BY, since DISTINCT ON expressions must lead the ordering.
defp distinct(nil, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: []}, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: true}, _, _), do: {" DISTINCT", []}
defp distinct(%QueryExpr{expr: false}, _, _), do: {[], []}
defp distinct(%QueryExpr{expr: exprs}, sources, query) do
  {[" DISTINCT ON (",
    intersperse_map(exprs, ", ", fn {_, expr} -> expr(expr, sources, query) end), ?)],
   exprs}
end

defp from(%{from: from} = query, sources) do
  {from, name} = get_source(query, sources, 0, from)
  [" FROM ", from, " AS " | name]
end
# Renders the SET clauses of an update_all query.
defp update_fields(%Query{updates: updates} = query, sources) do
  for(%{expr: expr} <- updates,
      {op, kw} <- expr,
      {key, value} <- kw,
      do: update_op(op, key, value, sources, query)) |> Enum.intersperse(", ")
end

defp update_op(:set, key, value, sources, query) do
  [quote_name(key), " = " | expr(value, sources, query)]
end

defp update_op(:inc, key, value, sources, query) do
  [quote_name(key), " = ", quote_qualified_name(key, sources, 0), " + " |
   expr(value, sources, query)]
end

# :push / :pull operate on array columns via array_append/array_remove.
defp update_op(:push, key, value, sources, query) do
  [quote_name(key), " = array_append(", quote_qualified_name(key, sources, 0),
   ", ", expr(value, sources, query), ?)]
end

defp update_op(:pull, key, value, sources, query) do
  [quote_name(key), " = array_remove(", quote_qualified_name(key, sources, 0),
   ", ", expr(value, sources, query), ?)]
end

defp update_op(command, _key, _value, _sources, query) do
  error!(query, "Unknown update operation #{inspect command} for PostgreSQL")
end

# update_all/delete_all cannot use JOIN directly: inner joins become
# FROM/USING entries and their ON conditions are moved into WHERE.
# Non-inner joins are rejected.
defp using_join(%Query{joins: []}, _kind, _prefix, _sources), do: {[], []}
defp using_join(%Query{joins: joins} = query, kind, prefix, sources) do
  froms =
    intersperse_map(joins, ", ", fn
      %JoinExpr{qual: :inner, ix: ix, source: source} ->
        {join, name} = get_source(query, sources, ix, source)
        [join, " AS " | name]
      %JoinExpr{qual: qual} ->
        error!(query, "PostgreSQL supports only inner joins on #{kind}, got: `#{qual}`")
    end)

  wheres =
    for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins,
        value != true,
        do: expr |> Map.put(:__struct__, BooleanExpr) |> Map.put(:op, :and)

  {[?\s, prefix, ?\s | froms], wheres}
end

defp join(%Query{joins: []}, _sources), do: []
defp join(%Query{joins: joins} = query, sources) do
  [?\s | intersperse_map(joins, ?\s, fn
    %JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source} ->
      {join, name} = get_source(query, sources, ix, source)
      [join_qual(qual), join, " AS ", name, " ON " | expr(expr, sources, query)]
  end)]
end

defp join_qual(:inner), do: "INNER JOIN "
defp join_qual(:inner_lateral), do: "INNER JOIN LATERAL "
defp join_qual(:left), do: "LEFT OUTER JOIN "
defp join_qual(:left_lateral), do: "LEFT OUTER JOIN LATERAL "
defp join_qual(:right), do: "RIGHT OUTER JOIN "
defp join_qual(:full), do: "FULL OUTER JOIN "
defp join_qual(:cross), do: "CROSS JOIN "
defp where(%Query{wheres: wheres} = query, sources) do
  boolean(" WHERE ", wheres, sources, query)
end

defp having(%Query{havings: havings} = query, sources) do
  boolean(" HAVING ", havings, sources, query)
end

defp group_by(%Query{group_bys: []}, _sources), do: []
defp group_by(%Query{group_bys: group_bys} = query, sources) do
  [" GROUP BY " |
   intersperse_map(group_bys, ", ", fn
     %QueryExpr{expr: expr} ->
       intersperse_map(expr, ", ", &expr(&1, sources, query))
   end)]
end

# `distinct` carries the DISTINCT ON expressions, which must come first
# in the ORDER BY list.
defp order_by(%Query{order_bys: []}, _distinct, _sources), do: []
defp order_by(%Query{order_bys: order_bys} = query, distinct, sources) do
  order_bys = Enum.flat_map(order_bys, & &1.expr)
  [" ORDER BY " |
   intersperse_map(distinct ++ order_bys, ", ", &order_by_expr(&1, sources, query))]
end

# ASC is the default in Postgres, so only DESC is rendered explicitly.
defp order_by_expr({dir, expr}, sources, query) do
  str = expr(expr, sources, query)
  case dir do
    :asc -> str
    :desc -> [str | " DESC"]
  end
end

defp limit(%Query{limit: nil}, _sources), do: []
defp limit(%Query{limit: %QueryExpr{expr: expr}} = query, sources) do
  [" LIMIT " | expr(expr, sources, query)]
end

defp offset(%Query{offset: nil}, _sources), do: []
defp offset(%Query{offset: %QueryExpr{expr: expr}} = query, sources) do
  [" OFFSET " | expr(expr, sources, query)]
end

# The lock clause (a caller-supplied string) is appended verbatim.
defp lock(nil), do: []
defp lock(lock_clause), do: [?\s | lock_clause]

# Combines boolean expressions with AND/OR. The accumulator tracks the
# previous operator: when the operator changes, the accumulated SQL is
# wrapped in parentheses so precedence is preserved.
defp boolean(_name, [], _sources, _query), do: []
defp boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do
  [name |
   Enum.reduce(query_exprs, {op, paren_expr(expr, sources, query)}, fn
     %BooleanExpr{expr: expr, op: op}, {op, acc} ->
       {op, [acc, operator_to_boolean(op), paren_expr(expr, sources, query)]}
     %BooleanExpr{expr: expr, op: op}, {_, acc} ->
       {op, [?(, acc, ?), operator_to_boolean(op), paren_expr(expr, sources, query)]}
   end) |> elem(1)]
end

defp operator_to_boolean(:and), do: " AND "
defp operator_to_boolean(:or), do: " OR "

defp paren_expr(expr, sources, query) do
  [?(, expr(expr, sources, query), ?)]
end
# Expression rendering. Ecto parameter indexes are 0-based while Postgres
# placeholders start at $1, hence the `ix + 1` below.
defp expr({:^, [], [ix]}, _sources, _query) do
  [?$ | Integer.to_string(ix + 1)]
end

# Qualified field access: `alias.field`.
defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) when is_atom(field) do
  quote_qualified_name(field, sources, idx)
end

# Whole-source selection needs a schema or an explicit field list.
defp expr({:&, _, [idx, fields, _counter]}, sources, query) do
  {source, name, schema} = elem(sources, idx)
  if is_nil(schema) and is_nil(fields) do
    error!(query, "PostgreSQL does not support selecting all fields from #{source} without a schema. " <>
                  "Please specify a schema or specify exactly which fields you want to select")
  end
  intersperse_map(fields, ", ", &[name, ?. | quote_name(&1)])
end

# `x in []` can never match.
defp expr({:in, _, [_left, []]}, _sources, _query) do
  "false"
end

defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
  args = intersperse_map(right, ?,, &expr(&1, sources, query))
  [expr(left, sources, query), " IN (", args, ?)]
end

# `x in ^list` becomes `x = ANY($n)` so the whole list is one parameter.
defp expr({:in, _, [left, {:^, _, [ix, _]}]}, sources, query) do
  [expr(left, sources, query), " = ANY($", Integer.to_string(ix + 1), ?)]
end

defp expr({:in, _, [left, right]}, sources, query) do
  [expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)]
end

defp expr({:is_nil, _, [arg]}, sources, query) do
  [expr(arg, sources, query) | " IS NULL"]
end

defp expr({:not, _, [expr]}, sources, query) do
  ["NOT (", expr(expr, sources, query), ?)]
end

# Subqueries are rendered as complete SELECT statements.
defp expr(%Ecto.SubQuery{query: query, fields: fields}, _sources, _query) do
  query.select.fields |> put_in(fields) |> all()
end

defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
  error!(query, "PostgreSQL adapter does not support keyword or interpolated fragments")
end

# Fragments interleave raw SQL text with generated sub-expressions.
defp expr({:fragment, _, parts}, sources, query) do
  Enum.map(parts, fn
    {:raw, part} -> part
    {:expr, expr} -> expr(expr, sources, query)
  end)
end

defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
  [?(, expr(datetime, sources, query), "::timestamp + ",
   interval(count, interval, sources, query) | ")::timestamp"]
end

defp expr({:date_add, _, [date, count, interval]}, sources, query) do
  [?(, expr(date, sources, query), "::date + ",
   interval(count, interval, sources, query) | ")::date"]
end

# Generic call: either an infix operator from @binary_ops or a
# function-style call; a trailing :distinct arg marks aggregate DISTINCT.
defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
  {modifier, args} =
    case args do
      [rest, :distinct] -> {"DISTINCT ", [rest]}
      _ -> {[], args}
    end

  case handle_call(fun, length(args)) do
    {:binary_op, op} ->
      [left, right] = args
      [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]
    {:fun, fun} ->
      [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
  end
end

defp expr(list, sources, query) when is_list(list) do
  ["ARRAY[", intersperse_map(list, ?,, &expr(&1, sources, query)), ?]]
end

defp expr(%Decimal{} = decimal, _sources, _query) do
  Decimal.to_string(decimal, :normal)
end

# Binary literals are emitted as lower-case hex bytea literals.
defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
     when is_binary(binary) do
  ["'\\x", Base.encode16(binary, case: :lower) | "'::bytea"]
end

# Other tagged values get an explicit `::type` cast.
defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
  [expr(other, sources, query), ?:, ?: | ecto_to_db(type)]
end

defp expr(nil, _sources, _query), do: "NULL"
defp expr(true, _sources, _query), do: "TRUE"
defp expr(false, _sources, _query), do: "FALSE"

defp expr(literal, _sources, _query) when is_binary(literal) do
  [?\', escape_string(literal), ?\']
end

defp expr(literal, _sources, _query) when is_integer(literal) do
  Integer.to_string(literal)
end

defp expr(literal, _sources, _query) when is_float(literal) do
  [Float.to_string(literal) | "::float"]
end

# Interval literals for date/datetime arithmetic. A non-literal count is
# multiplied against a unit interval at the SQL level (last clause).
defp interval(count, interval, _sources, _query) when is_integer(count) do
  ["interval '", String.Chars.Integer.to_string(count), ?\s, interval, ?\']
end

defp interval(count, interval, _sources, _query) when is_float(count) do
  count = :erlang.float_to_binary(count, [:compact, decimals: 16])
  ["interval '", count, ?\s, interval, ?\']
end

defp interval(count, interval, sources, query) do
  [?(, expr(count, sources, query), "::numeric * ",
   interval(1, interval, sources, query), ?)]
end

# Parenthesize nested binary operators to preserve precedence.
defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
  paren_expr(expr, sources, query)
end

defp op_to_binary(expr, sources, query) do
  expr(expr, sources, query)
end
# RETURNING clause: built either from a query's select fields or from a
# plain list of column names (empty select/list -> no clause).
#
# FIX: the `&quote_name/1` capture below had been corrupted to `"e_name/1`
# by HTML-entity decoding (`&quot` -> `"`); restored the capture syntax.
defp returning(%Query{select: nil}, _sources),
  do: []
defp returning(%Query{select: %{fields: fields}} = query, sources),
  do: [" RETURNING " | select_fields(fields, sources, query)]

defp returning([]),
  do: []
defp returning(returning),
  do: [" RETURNING " | intersperse_map(returning, ", ", &quote_name/1)]
# Builds the tuple of {quoted_source, alias, schema} triples, one per
# query source. Table aliases are the table's first letter plus the source
# index (e.g. "p0"); fragments use an "f" prefix and subqueries "s".
defp create_names(%{prefix: prefix, sources: sources}) do
  create_names(prefix, sources, 0, tuple_size(sources)) |> List.to_tuple()
end

defp create_names(prefix, sources, pos, limit) when pos < limit do
  current =
    case elem(sources, pos) do
      {table, schema} ->
        name = [String.first(table) | Integer.to_string(pos)]
        {quote_table(prefix, table), name, schema}
      {:fragment, _, _} ->
        {nil, [?f | Integer.to_string(pos)], nil}
      %Ecto.SubQuery{} ->
        {nil, [?s | Integer.to_string(pos)], nil}
    end
  [current | create_names(prefix, sources, pos + 1, limit)]
end

# Recursion terminates when pos reaches the source count.
defp create_names(_prefix, _sources, pos, pos) do
  []
end
# DDL
alias Ecto.Migration.{Table, Index, Reference, Constraint}
@drops [:drop, :drop_if_exists]
# DDL: CREATE TABLE (optionally IF NOT EXISTS) with column definitions,
# primary-key clause and raw table options, followed by COMMENT ON
# statements for the table and its columns.
def execute_ddl({command, %Table{} = table, columns}) when command in [:create, :create_if_not_exists] do
  table_name = quote_table(table.prefix, table.name)
  query = ["CREATE TABLE ",
           if_do(command == :create_if_not_exists, "IF NOT EXISTS "),
           table_name, ?\s, ?(,
           column_definitions(table, columns), pk_definition(columns, ", "), ?),
           options_expr(table.options)]
  [query] ++
    comments_on("TABLE", table_name, table.comment) ++
    comments_for_columns(table_name, columns)
end

# DDL: DROP TABLE (optionally IF EXISTS).
def execute_ddl({command, %Table{} = table}) when command in @drops do
  [["DROP TABLE ", if_do(command == :drop_if_exists, "IF EXISTS "),
    quote_table(table.prefix, table.name)]]
end

# DDL: ALTER TABLE applying a list of column changes, plus any comments.
def execute_ddl({:alter, %Table{} = table, changes}) do
  table_name = quote_table(table.prefix, table.name)
  query = ["ALTER TABLE ", table_name, ?\s,
           column_changes(table, changes), pk_definition(changes, ", ADD ")]
  [query] ++
    comments_on("TABLE", table_name, table.comment) ++
    comments_for_columns(table_name, changes)
end
# DDL: CREATE [UNIQUE] INDEX [CONCURRENTLY] with optional USING method and
# partial-index WHERE clause, followed by an index comment if present.
def execute_ddl({:create, %Index{} = index}) do
  fields = intersperse_map(index.columns, ", ", &index_expr/1)
  queries = [["CREATE ",
              if_do(index.unique, "UNIQUE "),
              "INDEX ",
              if_do(index.concurrently, "CONCURRENTLY "),
              quote_name(index.name),
              " ON ",
              quote_table(index.prefix, index.table),
              if_do(index.using, [" USING " , to_string(index.using)]),
              ?\s, ?(, fields, ?),
              if_do(index.where, [" WHERE ", to_string(index.where)])]]
  queries ++ comments_on("INDEX", quote_name(index.name), index.comment)
end

# DDL: emulate CREATE INDEX IF NOT EXISTS by wrapping the plain CREATE in
# a DO block that swallows the duplicate_table error.
def execute_ddl({:create_if_not_exists, %Index{} = index}) do
  [["DO $$ BEGIN ",
    execute_ddl({:create, index}), ";",
    "EXCEPTION WHEN duplicate_table THEN END; $$;"]]
end

# DDL: DROP INDEX [CONCURRENTLY] [IF EXISTS].
def execute_ddl({command, %Index{} = index}) when command in @drops do
  if_exists = if command == :drop_if_exists, do: "IF EXISTS ", else: []
  [["DROP INDEX ",
    if_do(index.concurrently, "CONCURRENTLY "),
    if_exists,
    quote_table(index.prefix, index.name)]]
end
# DDL: rename a table (the new name is rendered without a prefix).
def execute_ddl({:rename, %Table{} = current_table, %Table{} = new_table}) do
  [["ALTER TABLE ", quote_table(current_table.prefix, current_table.name),
    " RENAME TO ", quote_table(nil, new_table.name)]]
end

# DDL: rename a column within a table.
def execute_ddl({:rename, %Table{} = table, current_column, new_column}) do
  [["ALTER TABLE ", quote_table(table.prefix, table.name), " RENAME ",
    quote_name(current_column), " TO ", quote_name(new_column)]]
end

# DDL: add a named constraint (CHECK or EXCLUDE), plus its comment.
def execute_ddl({:create, %Constraint{} = constraint}) do
  table_name = quote_table(constraint.prefix, constraint.table)
  queries = [["ALTER TABLE ", table_name,
              " ADD ", new_constraint_expr(constraint)]]
  queries ++ comments_on("CONSTRAINT", constraint.name, constraint.comment, table_name)
end

# DDL: drop a named constraint.
def execute_ddl({:drop, %Constraint{} = constraint}) do
  [["ALTER TABLE ", quote_table(constraint.prefix, constraint.table),
    " DROP CONSTRAINT ", quote_name(constraint.name)]]
end

# Raw SQL strings pass through unchanged; keyword lists are rejected.
def execute_ddl(string) when is_binary(string), do: [string]

def execute_ddl(keyword) when is_list(keyword),
  do: error!(nil, "PostgreSQL adapter does not support keyword lists in execute")
# Builds the PRIMARY KEY (...) clause from all column entries whose options
# include :primary_key; returns [] when there is no primary key. `prefix`
# is the separator that joins the clause onto the surrounding statement
# (", " in CREATE TABLE, ", ADD " in ALTER TABLE).
#
# FIX: the capture had been corrupted to `"e_name/1` (an HTML-entity
# mangling of `&quote_name/1`), which is not valid Elixir; restored the
# proper capture so primary-key columns are quoted.
defp pk_definition(columns, prefix) do
  pks =
    for {_, name, _, opts} <- columns,
        opts[:primary_key],
        do: name

  case pks do
    [] -> []
    _ -> [prefix, "PRIMARY KEY (", intersperse_map(pks, ", ", &quote_name/1), ")"]
  end
end
# COMMENT ON <object> <name> IS '<comment>'; no statement when the comment
# is nil. `name` is expected to be pre-quoted by the caller.
defp comments_on(_object, _name, nil), do: []
defp comments_on(object, name, comment) do
  [["COMMENT ON ", object, ?\s, name, " IS ", single_quote(comment)]]
end

# Variant for objects scoped to a table (constraints):
# COMMENT ON <object> <name> ON <table> IS '<comment>'.
defp comments_on(_object, _name, nil, _table_name), do: []
defp comments_on(object, name, comment, table_name) do
  [["COMMENT ON ", object, ?\s, quote_name(name), " ON ", table_name,
    " IS ", single_quote(comment)]]
end
# Collects COMMENT ON COLUMN statements for every 4-tuple column entry that
# carries a :comment option; other entries (e.g. {:remove, name}) are
# skipped via the catch-all clause.
defp comments_for_columns(table_name, columns) do
  Enum.flat_map(columns, fn
    {_operation, column_name, _column_type, opts} ->
      column_name = [table_name, ?. | quote_name(column_name)]
      comments_on("COLUMN", column_name, opts[:comment])
    _ -> []
  end)
end
# Renders the comma-separated column list of a CREATE TABLE.
defp column_definitions(table, columns) do
  intersperse_map(columns, ", ", &column_definition(table, &1))
end

# A column backed by a %Reference{} also emits an inline foreign-key
# constraint pointing at the referenced table.
defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
  [quote_name(name), ?\s, reference_column_type(ref.type, opts),
   column_options(ref.type, opts), reference_expr(ref, table, name)]
end

# Plain column: quoted name, type, then DEFAULT/NULL options.
defp column_definition(_table, {:add, name, type, opts}) do
  [quote_name(name), ?\s, column_type(type, opts), column_options(type, opts)]
end
# Renders the comma-separated change list of an ALTER TABLE.
defp column_changes(table, columns) do
  intersperse_map(columns, ", ", &column_change(table, &1))
end

# ADD COLUMN carrying an inline foreign-key reference.
defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
  ["ADD COLUMN ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
   column_options(ref.type, opts), reference_expr(ref, table, name)]
end

# Plain ADD COLUMN.
defp column_change(_table, {:add, name, type, opts}) do
  ["ADD COLUMN ", quote_name(name), ?\s, column_type(type, opts),
   column_options(type, opts)]
end

# ALTER COLUMN ... TYPE for a reference column; also re-adds the FK
# constraint and applies any NULL/DEFAULT modifications.
defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
  ["ALTER COLUMN ", quote_name(name), " TYPE ", reference_column_type(ref.type, opts),
   constraint_expr(ref, table, name), modify_null(name, opts), modify_default(name, ref.type, opts)]
end

# Plain ALTER COLUMN ... TYPE with NULL/DEFAULT modifications.
defp column_change(_table, {:modify, name, type, opts}) do
  ["ALTER COLUMN ", quote_name(name), " TYPE ",
   column_type(type, opts), modify_null(name, opts), modify_default(name, type, opts)]
end

defp column_change(_table, {:remove, name}), do: ["DROP COLUMN ", quote_name(name)]
# Emits DROP/SET NOT NULL for a :modify change: `null: true` makes the
# column nullable, `null: false` makes it NOT NULL, absent leaves it alone.
defp modify_null(name, opts) do
  case Keyword.get(opts, :null) do
    true -> [", ALTER COLUMN ", quote_name(name), " DROP NOT NULL"]
    false -> [", ALTER COLUMN ", quote_name(name), " SET NOT NULL"]
    nil -> []
  end
end

# Emits SET DEFAULT when a :default option is present. Keyword.fetch/2 is
# used (rather than get) so an explicit `default: nil` still produces a
# DEFAULT NULL clause via default_expr/2.
defp modify_default(name, type, opts) do
  case Keyword.fetch(opts, :default) do
    {:ok, val} -> [", ALTER COLUMN ", quote_name(name), " SET", default_expr({:ok, val}, type)]
    :error -> []
  end
end
# DEFAULT and NULL/NOT NULL suffixes shared by all column definitions.
# :default is fetched (so `default: nil` is distinguishable from absent),
# :null is read with a nil fallback.
defp column_options(type, opts) do
  default = Keyword.fetch(opts, :default)
  null = Keyword.get(opts, :null)
  [default_expr(default, type), null_expr(null)]
end
# Maps the :null option to its SQL suffix: false -> NOT NULL,
# true -> NULL, anything else (including nil/absent) -> nothing.
defp null_expr(null) do
  case null do
    false -> " NOT NULL"
    true -> " NULL"
    _ -> []
  end
end
# CHECK constraint body: the check expression is injected verbatim.
defp new_constraint_expr(%Constraint{check: check} = constraint) when is_binary(check) do
  ["CONSTRAINT ", quote_name(constraint.name), " CHECK (", check, ")"]
end

# EXCLUDE constraint body: `exclude` is raw SQL placed after EXCLUDE USING.
defp new_constraint_expr(%Constraint{exclude: exclude} = constraint) when is_binary(exclude) do
  ["CONSTRAINT ", quote_name(constraint.name), " EXCLUDE USING ", exclude]
end
# Renders the DEFAULT clause for a column, dispatching on the literal kind.
# An explicit nil default becomes DEFAULT NULL; :error (option absent)
# yields no clause at all.
defp default_expr({:ok, nil}, _type),
  do: " DEFAULT NULL"
# Empty-list defaults need an array cast so PostgreSQL knows the type.
defp default_expr({:ok, []}, type),
  do: [" DEFAULT ARRAY[]::", ecto_to_db(type)]
defp default_expr({:ok, literal}, _type) when is_binary(literal),
  do: [" DEFAULT '", escape_string(literal), ?']
defp default_expr({:ok, literal}, _type) when is_number(literal) or is_boolean(literal),
  do: [" DEFAULT ", to_string(literal)]
# fragment(...) defaults are injected verbatim as raw SQL.
defp default_expr({:ok, {:fragment, expr}}, _type),
  do: [" DEFAULT ", expr]
# Any other term is a caller error and is rejected loudly.
defp default_expr({:ok, expr}, type),
  do: raise(ArgumentError, "unknown default `#{inspect expr}` for type `#{inspect type}`. " <>
            ":default may be a string, number, boolean, empty list or a fragment(...)")
defp default_expr(:error, _),
  do: []
# One entry of an index column list: a raw SQL expression (given as a
# binary) is wrapped in parentheses; a plain column name is quoted.
defp index_expr(literal) when is_binary(literal) do
  [?(, literal, ?)]
end

defp index_expr(literal) do
  quote_name(literal)
end
# Renders the trailing options of a CREATE TABLE. Only a raw SQL string
# (or nil) is accepted; keyword lists are rejected explicitly.
defp options_expr(nil),
  do: []
defp options_expr(keyword) when is_list(keyword),
  do: error!(nil, "PostgreSQL adapter does not support keyword lists in :options")
defp options_expr(options),
  do: [?\s, options]
# Maps an Ecto type plus options to its PostgreSQL column type. Arrays
# recurse on the element type and append `[]`.
defp column_type({:array, type}, opts),
  do: [column_type(type, opts), "[]"]

defp column_type(type, opts) do
  size = Keyword.get(opts, :size)
  precision = Keyword.get(opts, :precision)
  scale = Keyword.get(opts, :scale)
  type_name = ecto_to_db(type)
  cond do
    # :size takes precedence over :precision/:scale when both are given.
    size -> [type_name, ?(, to_string(size), ?)]
    precision -> [type_name, ?(, to_string(precision), ?,, to_string(scale || 0), ?)]
    # Bare :string columns default to varchar(255).
    type == :string -> [type_name, "(255)"]
    true -> type_name
  end
end
# Inline REFERENCES clause used when a column is defined together with its
# foreign key (CREATE TABLE / ADD COLUMN).
defp reference_expr(%Reference{} = ref, table, name),
  do: [" CONSTRAINT ", reference_name(ref, table, name), " REFERENCES ",
       quote_table(table.prefix, ref.table), ?(, quote_name(ref.column), ?),
       reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]

# Standalone ADD CONSTRAINT ... FOREIGN KEY clause used by :modify changes.
defp constraint_expr(%Reference{} = ref, table, name),
  do: [", ADD CONSTRAINT ", reference_name(ref, table, name), ?\s,
       "FOREIGN KEY (", quote_name(name),
       ") REFERENCES ", quote_table(table.prefix, ref.table), ?(, quote_name(ref.column), ?),
       reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]

# Constraint name: an explicit %Reference{name: ...} wins, otherwise the
# conventional "<table>_<column>_fkey" is generated.
defp reference_name(%Reference{name: nil}, table, column),
  do: quote_name("#{table.name}_#{column}_fkey")
defp reference_name(%Reference{name: name}, _table, _column),
  do: quote_name(name)
# Foreign-key columns pointing at serial/bigserial primary keys are plain
# integer/bigint; everything else uses the regular column type mapping.
defp reference_column_type(:serial, _opts), do: "integer"
defp reference_column_type(:bigserial, _opts), do: "bigint"
defp reference_column_type(type, opts), do: column_type(type, opts)
# ON DELETE action for a foreign key; unrecognized/absent actions emit
# nothing.
defp reference_on_delete(action) do
  case action do
    :nilify_all -> " ON DELETE SET NULL"
    :delete_all -> " ON DELETE CASCADE"
    _ -> []
  end
end

# ON UPDATE action for a foreign key; unrecognized/absent actions emit
# nothing.
defp reference_on_update(action) do
  case action do
    :nilify_all -> " ON UPDATE SET NULL"
    :update_all -> " ON UPDATE CASCADE"
    _ -> []
  end
end
## Helpers
# Resolves source `ix` to {rendered_expr, alias}. Sources whose cached
# expr is nil (fragments/subqueries per create_names/4) are rendered on
# the fly in parentheses.
defp get_source(query, sources, ix, source) do
  {expr, name, _schema} = elem(sources, ix)
  {expr || paren_expr(source, sources, query), name}
end

# Renders `alias."name"` for a field belonging to source `ix`.
defp quote_qualified_name(name, sources, ix) do
  {_, source, _} = elem(sources, ix)
  [source, ?. | quote_name(name)]
end
# Double-quotes an identifier, converting atoms to strings first. Names
# that already contain a double quote are rejected, since they cannot be
# represented safely here.
defp quote_name(name) when is_atom(name) do
  name |> Atom.to_string() |> quote_name()
end

defp quote_name(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad field name #{inspect name}")
  end

  [?", name, ?"]
end
# Quotes a possibly-prefixed table name: `"prefix"."table"` when a prefix
# is given, otherwise just `"table"`. Names containing a double quote are
# rejected.
defp quote_table(nil, name) do
  quote_table(name)
end

defp quote_table(prefix, name) do
  [quote_table(prefix), ?., quote_table(name)]
end

defp quote_table(name) when is_atom(name) do
  name |> Atom.to_string() |> quote_table()
end

defp quote_table(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad table name #{inspect name}")
  end

  [?", name, ?"]
end
# Wraps a value in single quotes for SQL, doubling any embedded quotes.
defp single_quote(value) do
  [?', escape_string(value), ?']
end
# Maps `mapper` over `list` while interleaving `separator`, accumulating
# iodata; the separator is never emitted after the final element.
defp intersperse_map(list, separator, mapper, acc \\ [])

defp intersperse_map([], _separator, _mapper, acc) do
  acc
end

defp intersperse_map([last], _separator, mapper, acc) do
  [acc | mapper.(last)]
end

defp intersperse_map([head | tail], separator, mapper, acc) do
  intersperse_map(tail, separator, mapper, [acc, mapper.(head), separator])
end
# Like intersperse_map/4, but the mapper is a reducer that threads
# `user_acc` through every element; returns {iodata, final_user_acc}.
defp intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])

defp intersperse_reduce([], _separator, user_acc, _reducer, acc) do
  {acc, user_acc}
end

defp intersperse_reduce([last], _separator, user_acc, reducer, acc) do
  {rendered, user_acc} = reducer.(last, user_acc)
  {[acc | rendered], user_acc}
end

defp intersperse_reduce([head | tail], separator, user_acc, reducer, acc) do
  {rendered, user_acc} = reducer.(head, user_acc)
  intersperse_reduce(tail, separator, user_acc, reducer, [acc, rendered, separator])
end
# Returns `value` when `condition` is truthy, otherwise empty iodata.
defp if_do(condition, value) do
  case condition do
    falsy when falsy in [nil, false] -> []
    _ -> value
  end
end
# Escapes a string for inclusion in a single-quoted SQL literal by
# doubling every single quote.
defp escape_string(value) when is_binary(value) do
  String.replace(value, "'", "''")
end
# Maps Ecto type names to PostgreSQL type names. Arrays recurse on the
# element type and append `[]`; map types are looked up from the
# :postgres_map_type application setting; unknown atoms fall through as
# their literal name.
defp ecto_to_db({:array, t}), do: [ecto_to_db(t), ?[, ?]]
defp ecto_to_db(:id), do: "integer"
defp ecto_to_db(:serial), do: "serial"
defp ecto_to_db(:bigserial), do: "bigserial"
defp ecto_to_db(:binary_id), do: "uuid"
defp ecto_to_db(:string), do: "varchar"
defp ecto_to_db(:binary), do: "bytea"
defp ecto_to_db(:map), do: Application.fetch_env!(:ecto, :postgres_map_type)
defp ecto_to_db({:map, _}), do: Application.fetch_env!(:ecto, :postgres_map_type)
defp ecto_to_db(:utc_datetime), do: "timestamp"
defp ecto_to_db(:naive_datetime), do: "timestamp"
defp ecto_to_db(other), do: Atom.to_string(other)
# Raises an ArgumentError when there is no query to report, otherwise an
# Ecto.QueryError carrying the offending query for context.
defp error!(nil, message) do
  raise ArgumentError, message
end

defp error!(query, message) do
  raise Ecto.QueryError, query: query, message: message
end
end
end
| 37.012807 | 109 | 0.600213 |
ffb72a349c317c6a8d242ffef5382bdad5de11ed | 1,403 | exs | Elixir | test/straw_hat_map/cities_test.exs | straw-hat-team/straw_hat_map | df71340122852577b9df2ed2afcbcc4be879aba1 | [
"MIT"
] | 4 | 2018-03-07T04:12:36.000Z | 2018-03-08T16:13:14.000Z | test/straw_hat_map/cities_test.exs | straw-hat-labs/straw_hat_map | df71340122852577b9df2ed2afcbcc4be879aba1 | [
"MIT"
] | 61 | 2018-03-07T04:50:42.000Z | 2019-11-01T10:24:03.000Z | test/straw_hat_map/cities_test.exs | straw-hat-team/straw_hat_map | df71340122852577b9df2ed2afcbcc4be879aba1 | [
"MIT"
] | 2 | 2018-03-22T19:44:45.000Z | 2019-08-02T03:45:47.000Z | defmodule StrawHat.Map.CitiesTests do
use StrawHat.Map.TestSupport.CaseTemplate, async: true
alias StrawHat.Map.Cities
describe "finding a city" do
test "with a valid ID" do
city = insert(:city)
assert {:ok, _city} = Cities.find_city(Repo, city.id)
end
test "with an invalid ID" do
city_id = Ecto.UUID.generate()
assert {:error, _reason} = Cities.find_city(Repo, city_id)
end
end
test "returning a pagination of cities" do
insert_list(6, :city)
city_page = Cities.get_cities(Repo, %{page: 2, page_size: 5})
assert length(city_page.entries) == 1
end
test "creating a city with valid inputs" do
params = params_with_assocs(:city)
assert {:ok, _city} = Cities.create_city(Repo, params)
end
test "updating a city with valid inputs" do
city = insert(:city)
{:ok, city} = Cities.update_city(Repo, city, %{name: "Havana"})
assert city.name == "Havana"
end
test "destroying an existing city" do
city = insert(:city)
assert {:ok, _} = Cities.destroy_city(Repo, city)
end
test "getting a list of cities with a list of city's IDs" do
cities_ids =
3
|> insert_list(:city)
|> Enum.map(&Map.get(&1, :id))
found_cities_ids =
Repo
|> Cities.get_cities_by_ids(cities_ids)
|> Enum.map(&Map.get(&1, :id))
assert cities_ids == found_cities_ids
end
end
| 23.779661 | 67 | 0.647897 |
ffb7627656d9bb230db7d6291bbfb144ac943b34 | 1,976 | ex | Elixir | lib/telluride_ui/messaging/pipeline_config_producer.ex | brsg/telluride_ui | f5f477ff2ed5aa9ef5baa2d6f4e4596ecee482fd | [
"Apache-2.0"
] | 2 | 2021-03-05T13:24:49.000Z | 2021-09-22T23:36:21.000Z | lib/telluride_ui/messaging/pipeline_config_producer.ex | brsg/telluride_ui | f5f477ff2ed5aa9ef5baa2d6f4e4596ecee482fd | [
"Apache-2.0"
] | null | null | null | lib/telluride_ui/messaging/pipeline_config_producer.ex | brsg/telluride_ui | f5f477ff2ed5aa9ef5baa2d6f4e4596ecee482fd | [
"Apache-2.0"
] | null | null | null | defmodule Telluride.Messaging.PipelineConfigProducer do
use GenServer
alias __MODULE__
alias Telluride.Messaging.AMQPConnectionManager
alias Telluride.Messaging.PipelineConfigQueue
################################################################################
# Client interface
################################################################################
def start_link(init_arg) do
GenServer.start_link(__MODULE__, init_arg, [name: __MODULE__])
end
def publish(message) when is_map(message) do
{:ok, json} = Jason.encode(message)
PipelineConfigProducer.publish(json)
end
def publish(message) when is_binary(message) do
GenServer.call(__MODULE__, {:publish, message, @routing_key})
end
################################################################################
# AMQPConnectionManager callbacks
################################################################################
def channel_available(channel) do
GenServer.cast(__MODULE__, {:channel_available, channel})
end
################################################################################
# GenServer callbacks
################################################################################
@impl true
def init(_init_arg) do
AMQPConnectionManager.request_channel(__MODULE__)
{:ok, nil}
end
@impl true
def handle_cast({:channel_available, channel}, _state) do
:ok = PipelineConfigQueue.configure_producer(channel)
{:noreply, channel}
end
@impl true
def handle_call({:publish, message, @routing_key}, _from, channel) do
AMQP.Basic.publish(
channel, #channel
PipelineConfigQueue.exchange, #exchange
PipelineConfigQueue.msg_routing_key, #routing key
message, #payload
persistent: true, #options...
content_type: "application/json"
)
{:reply, :ok, channel}
end
end
| 31.365079 | 82 | 0.515182 |
ffb77eaa83fe7554e4a63f93a95e77c7857c3947 | 539 | ex | Elixir | lib/nudge_api/matches/meeting.ex | feelja-tech/feelja-api | 03ce15430460cf2dac24a7740242c7e5ac5c5804 | [
"MIT"
] | null | null | null | lib/nudge_api/matches/meeting.ex | feelja-tech/feelja-api | 03ce15430460cf2dac24a7740242c7e5ac5c5804 | [
"MIT"
] | null | null | null | lib/nudge_api/matches/meeting.ex | feelja-tech/feelja-api | 03ce15430460cf2dac24a7740242c7e5ac5c5804 | [
"MIT"
] | null | null | null | defmodule NudgeApi.Matches.Meeting do
use Ecto.Schema
import Ecto.Changeset
schema "meetings" do
field :finalized_at, :utc_datetime
field :location, :map
field :happens_at, :utc_datetime
belongs_to :video_call, NudgeApi.Matches.VideoCall
has_many :user_meetings, NudgeApi.Matches.UserMeeting
timestamps(type: :utc_datetime)
end
@doc false
def changeset(meeting, attrs) do
meeting
|> cast(attrs, [:happens_at, :location, :finalized_at, :video_call_id])
|> validate_required([])
end
end
| 23.434783 | 75 | 0.721707 |
ffb77eedae87d60e47edb0614af295036cae9c03 | 3,549 | exs | Elixir | queen-attack/queen_attack_test.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | queen-attack/queen_attack_test.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | queen-attack/queen_attack_test.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("queen_attack.exs", __DIR__)
end
ExUnit.start
ExUnit.configure exclude: :pending, trace: true
defmodule QueenAttackTest do
  # Exercises the Queens exercise: construction, board rendering, and
  # attack detection.
  #
  # FIX: board fixtures previously used the deprecated String.strip/1;
  # replaced with its documented successor String.trim/1 (identical
  # behavior for leading/trailing whitespace).
  use ExUnit.Case

  # test "default positions" do
  #   assert Queens.new_list([
  #     {1, {1, 2, 3}},
  #     {4, {2, 3, 4}},
  #     {5, {2, 3, 4}},
  #     {4, {4, 3, 2}},
  #     {4, {6, 3, 5}},
  #     {500, {1, 2, 1}},
  #     {10000, {1, 2, 3}},
  #   ]) == [1, 13, 16, 14, 22, 0, 0]
  # end

  # @tag :pending
  test "default positions" do
    queens = Queens.new
    assert queens.white == {0, 3}
    assert queens.black == {7, 3}
  end

  # @tag :pending
  test "specific placement" do
    queens = Queens.new({3, 7}, {6, 1})
    assert queens.white == {3, 7}
    assert queens.black == {6, 1}
  end

  # @tag :pending
  test "cannot occupy same space" do
    assert_raise ArgumentError, fn ->
      Queens.new({2, 4}, {2, 4})
    end
  end

  # @tag :pending
  test "string representation" do
    queens = Queens.new({2, 4}, {6, 6})
    board = String.trim """
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ W _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ B _
    _ _ _ _ _ _ _ _
    """
    assert Queens.to_string(queens) == board
  end

  # @tag :pending
  test "another string representation" do
    queens = Queens.new({7, 1}, {0, 0})
    board = String.trim """
    B _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ W _ _ _ _ _ _
    """
    assert Queens.to_string(queens) == board
  end

  # @tag :pending
  test "yet another string representation" do
    queens = Queens.new({4, 3}, {3, 4})
    board = String.trim """
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ B _ _ _
    _ _ _ W _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    """
    assert Queens.to_string(queens) == board
  end

  # @tag :pending
  test "queen placed on the bottom right corner" do
    queens = Queens.new({4, 3}, {7, 7})
    board = String.trim """
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ W _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ B
    """
    assert Queens.to_string(queens) == board
  end

  # @tag :pending
  test "queen placed on the edge of the board" do
    queens = Queens.new({4, 3}, {2, 7})
    board = String.trim """
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ B
    _ _ _ _ _ _ _ _
    _ _ _ W _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    _ _ _ _ _ _ _ _
    """
    assert Queens.to_string(queens) == board
  end

  # @tag :pending
  test "cannot attack" do
    queens = Queens.new({2, 3}, {4, 7})
    refute Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on same row" do
    queens = Queens.new({2, 4}, {2, 7})
    assert Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on same column" do
    queens = Queens.new({5, 4}, {2, 4})
    assert Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on diagonal" do
    queens = Queens.new({1, 1}, {6, 6})
    assert Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on other diagonal" do
    queens = Queens.new({0, 6}, {1, 7})
    assert Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on yet another diagonal" do
    queens = Queens.new({4, 1}, {6, 3})
    assert Queens.can_attack?(queens)
  end

  # @tag :pending
  test "can attack on a diagonal slanted the other way" do
    queens = Queens.new({6, 1}, {1, 6})
    assert Queens.can_attack?(queens)
  end
end
| 21.379518 | 58 | 0.573401 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.