hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
03cf62f16c5e06ca086a8868fcde1f3fea82786c | 2,667 | ex | Elixir | lib/maru/builder/parameter.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | [
"BSD-3-Clause"
] | 819 | 2016-11-25T07:12:04.000Z | 2022-03-16T06:59:36.000Z | lib/maru/builder/parameter.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | [
"BSD-3-Clause"
] | 71 | 2016-11-24T20:19:09.000Z | 2021-06-09T10:12:23.000Z | lib/maru/builder/parameter.ex | davidenko87/maru | 62b6ca0e42dd550683af8743f50e67048c4027d0 | [
"BSD-3-Clause"
] | 56 | 2015-01-10T23:34:12.000Z | 2016-11-17T00:13:56.000Z | alias Maru.Builder.Parameter
defmodule Parameter.Information do
@moduledoc false
defstruct attr_name: nil,
param_key: nil,
desc: nil,
type: nil,
default: nil,
required: true,
children: []
end
defmodule Parameter.Runtime do
@moduledoc false
defstruct attr_name: nil,
param_key: nil,
children: [],
nested: nil,
blank_func: nil,
parser_func: nil,
validate_func: nil
end
defmodule Parameter do
defstruct information: nil,
runtime: nil
defmacro __using__(_) do
quote do
@parameters []
Module.register_attribute(__MODULE__, :shared_params, accumulate: true)
import Parameter.DSLs, only: [params: 1, params: 2]
end
end
def using_helper(_) do
quote do
Module.register_attribute(__MODULE__, :shared_params, accumulate: true)
import Parameter.DSLs, only: [params: 2]
end
end
def after_helper(helper_module, %Macro.Env{module: env_module}) do
Enum.each(
helper_module.__shared_params__(),
&Module.put_attribute(env_module, :shared_params, &1)
)
end
def before_parse_namespace(%Macro.Env{module: module}) do
parameters = Module.get_attribute(module, :parameters)
Module.put_attribute(module, :parameters, [])
addittion_parameter =
case Module.get_attribute(module, :namespace_context) do
%{namespace: :route_param} = context ->
[attr_name: context.parameter, required: true]
|> Enum.concat(context.options)
|> Parameter.Helper.parse()
_ ->
nil
end
resource = Module.get_attribute(module, :resource)
new_parameters = resource.parameters ++ parameters ++ List.wrap(addittion_parameter)
Module.put_attribute(module, :resource, %{resource | parameters: new_parameters})
end
def before_parse_router(%Macro.Env{module: module}) do
resource = Module.get_attribute(module, :resource)
parameters = Module.get_attribute(module, :parameters)
Module.put_attribute(module, :parameters, [])
route = Module.get_attribute(module, :router)
Module.put_attribute(module, :router, %{route | parameters: resource.parameters ++ parameters})
end
def before_compile_helper(%Macro.Env{module: module} = env) do
shared_params =
for {name, params} <- Module.get_attribute(module, :shared_params) do
{name, params |> Macro.escape()}
end
quoted =
quote do
def __shared_params__ do
unquote(shared_params)
end
end
Module.eval_quoted(env, quoted)
end
end
| 26.939394 | 99 | 0.651669 |
03cf6ece53819d164126511246ebdd7817139f01 | 1,740 | exs | Elixir | mix.exs | WolfDan/dgraph_ex | 4dad42983f2387f10febf9996ac8f2db20aea710 | [
"MIT"
] | 21 | 2017-08-20T06:19:37.000Z | 2021-02-04T23:22:10.000Z | mix.exs | WolfDan/dgraph_ex | 4dad42983f2387f10febf9996ac8f2db20aea710 | [
"MIT"
] | 43 | 2017-08-06T21:03:28.000Z | 2018-09-08T13:00:35.000Z | mix.exs | WolfDan/dgraph_ex | 4dad42983f2387f10febf9996ac8f2db20aea710 | [
"MIT"
] | 1 | 2017-10-12T02:20:13.000Z | 2017-10-12T02:20:13.000Z | defmodule DgraphEx.Mixfile do
use Mix.Project
def project do
[
app: :dgraph_ex,
version: "0.1.5",
elixir: "~> 1.5.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps(),
package: package(),
description: description(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
"coveralls": :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test,
],
source_url: "https://github.com/elbow-jason/dgraph_ex",
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[
extra_applications: [],
# mod: {DgraphEx.Application, []},
]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:httpoison, "~> 0.12.0"},
{:poison, "~> 3.1"},
{:excoveralls, "~> 0.7.2", only: :test},
{:ex_doc, ">= 0.0.0", only: :dev},
]
end
defp description do
"""
A database wrapper and model layer for dgraph.
"""
end
defp package do
# These are the default files included in the package
[
name: :dgraph_ex,
files: ["lib", "mix.exs", "README*", "LICENSE*", ".iex.exs"],
maintainers: ["Jason Goldberger"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/elbow-jason/dgraph_ex"}
]
end
end
| 24.507042 | 79 | 0.56092 |
03cf838b7ccbc45dfbc3a3d769c73409e133f781 | 694 | exs | Elixir | config/test.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | null | null | null | config/test.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | 1 | 2021-03-10T19:38:43.000Z | 2021-03-10T19:38:43.000Z | config/test.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :taskmaster_api, TaskmasterApi.Repo,
username: "postgres",
password: "postgres",
database: "taskmaster_api_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :taskmaster_api, TaskmasterApiWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 30.173913 | 73 | 0.760807 |
03cf8c65aec4b3445d7d5a605795296534950f5f | 12,531 | ex | Elixir | lib/ex_unit/lib/ex_unit/assertions.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | Nicd/elixir | e62ef92a4be1b562033d35b2d822cc9d6c661077 | [
"Apache-2.0"
] | null | null | null | defexception ExUnit.AssertionError, message: "assertion failed"
defexception ExUnit.ExpectationError, expected: nil, actual: nil, reason: "",
negation: false, prelude: "Expected", description: nil do
def message(exception) do
if desc = exception.description do
"#{exception.prelude} #{desc} #{exception.full_reason} " <>
"#{exception.expected}. Insted got #{exception.actual}"
else
"#{exception.prelude} #{exception.expected} " <>
"#{exception.full_reason} #{exception.actual}"
end
end
def full_reason(exception) do
"to" <> if(exception.negation, do: " not ", else: " ") <> exception.reason
end
end
defmodule ExUnit.Assertions do
@moduledoc """
This module contains a set of assertions functions that are
imported by default into your test cases.
In general, a developer will want to use the general
`assert` macro in tests. The macro tries to be smart
and provide good reporting whenever there is a failure.
For example, `assert some_fun() == 10` will fail (assuming
`some_fun()` returns 13):
Expected 10 to be equal to 13
This module also provides other small convenient functions
like `assert_in_delta` and `assert_raise` to easily handle other
common cases as checking a float number or handling exceptions.
"""
@doc """
Asserts the `expected` value is true.
`assert` in general tries to be smart and provide a good
reporting whenever there is a failure. For example,
`assert 10 > 15` is going to fail with a message:
Expected 10 to be more than 15
## Examples
assert true
"""
defmacro assert(expected) do
translate_assertion(expected, fn ->
quote do
value = unquote(expected)
assert value, "Expected #{inspect value} to be true"
end
end)
end
@doc """
Refutes the `expected` value is true.
`refute` in general tries to be smart and provide a good
reporting whenever there is a failure.
## Examples
refute false
"""
defmacro refute(expected) do
contents = translate_assertion({ :!, [], [expected] }, fn ->
quote do
value = unquote(expected)
assert !value, "Expected #{inspect value} to be false"
end
end)
{ :!, [], [contents] }
end
## START HELPERS
defp translate_assertion({ :=, _, [left, right] }, _else) do
quote do
right = unquote(right)
case right do
unquote(left) ->
right
_ ->
raise ExUnit.ExpectationError,
expected: inspect(right),
actual: unquote(Macro.to_binary(left)),
reason: "match pattern (=)"
end
end
end
defp translate_assertion({ :==, _, [left, right] }, _else) do
assert_operator :==, left, right, "be equal to (==)"
end
defp translate_assertion({ :<, _, [left, right] }, _else) do
assert_operator :<, left, right, "be less than"
end
defp translate_assertion({ :>, _, [left, right] }, _else) do
assert_operator :>, left, right, "be more than"
end
defp translate_assertion({ :<=, _, [left, right] }, _else) do
assert_operator :<=, left, right, "be less than or equal to"
end
defp translate_assertion({ :>=, _, [left, right] }, _else) do
assert_operator :>=, left, right, "be more than or equal to"
end
defp translate_assertion({ :===, _, [left, right] }, _else) do
assert_operator :===, left, right, "be equal to (===)"
end
defp translate_assertion({ :!==, _, [left, right] }, _else) do
assert_operator :!==, left, right, "be not equal to (!==)"
end
defp translate_assertion({ :!=, _, [left, right] }, _else) do
assert_operator :!=, left, right, "be not equal to (!=)"
end
defp translate_assertion({ :=~, _, [left, right] }, _else) do
assert_operator :=~, left, right, "match (=~)"
end
defp translate_assertion({ :in, _, [left, right] }, _else) do
quote do
left = unquote(left)
right = unquote(right)
assert Enum.member?(right, left), left, right, reason: "be in"
end
end
## Negative versions
defp translate_assertion({ :!, _, [{ :=, _, [left, right] }] }, _else) do
quote do
right = unquote(right)
case right do
unquote(left) ->
raise ExUnit.ExpectationError,
expected: inspect(right),
actual: unquote(Macro.to_binary(left)),
reason: "match pattern (=)",
negation: true
_ ->
nil
end
end
end
defp translate_assertion({ :!, _, [{ :=~, _, [left, right] }] }, _else) do
quote do
left = unquote(left)
right = unquote(right)
assert !(left =~ right), left, right, reason: "match (=~)", negation: true
end
end
defp translate_assertion({ negation, _, [{ :in, _, [left, right] }] }, _else) when negation in [:!, :not] do
quote do
left = unquote(left)
right = unquote(right)
assert !Enum.member?(right, left), left, right, reason: "be in", negation: true
end
end
## Fallback
defp translate_assertion(_expected, fallback) do
fallback.()
end
defp assert_operator(operator, expected, actual, text) do
quote do
left = unquote(expected)
right = unquote(actual)
assert unquote(operator)(left, right), left, right, reason: unquote(text)
end
end
## END HELPERS
@doc """
Asserts the `expected` value is true.
If it fails, raises the given `message`.
## Examples
assert false, "it will never be true"
"""
def assert(expected, message) when is_binary(message) do
unless expected, do: raise(ExUnit.AssertionError, message: message)
true
end
@doc """
Asserts the `expected` value is true.
If it fails, it raises an expectation error
using the given expected and actual values.
## Examples
assert this > that, this, that, reason: "more than"
"""
def assert(value, expected, actual, opts) do
unless value do
raise ExUnit.ExpectationError,
Keyword.merge([expected: inspect(expected), actual: inspect(actual)], opts)
end
true
end
@doc """
Assets a message was or is going to be received. Differently from
`assert_received`, it has a default timeout time of 100 miliseconds.
The given `expected` content must be a pattern.
## Examples
assert_receive :hello
Asserts against a larger timeout:
assert_receive :hello, 20_000
You can also match against specific patterns:
assert_receive { :hello, _ }
"""
defmacro assert_receive(expected, timeout // 100, message // nil) do
do_assert_receive(expected, timeout, message)
end
@doc """
Asserts a message was received and is in the current process mailbox.
The given `expected` content must to be a match pattern.
Timeout is set to 0, so there is no waiting time.
## Examples
self <- :hello
assert_received :hello
You can also match against specific patterns:
self <- { :hello, "world" }
assert_received { :hello, _ }
"""
defmacro assert_received(expected, message // nil) do
do_assert_receive(expected, 0, message)
end
defp do_assert_receive(expected, timeout, message) do
binary = Macro.to_binary(expected)
quote do
receive do
unquote(expected) = received -> received
after
unquote(timeout) ->
flunk unquote(message) || "Expected to have received message matching #{unquote binary}"
end
end
end
@doc """
Asserts the `exception` is raised during `function` execution with
the `expected_message`. Returns the rescued exception, fails otherwise.
## Examples
assert_raise ArithmeticError, "bad argument in arithmetic expression", fn ->
1 + "test"
end
"""
def assert_raise(exception, message, function) when is_function(function) do
error = assert_raise(exception, function)
is_match = case message do
re when is_regex(re) -> error.message =~ re
bin when is_binary(bin) -> error.message == bin
end
assert is_match, message, error.message,
prelude: "Expected #{inspect error}'s message", reason: "match"
error
end
@doc """
Asserts the `exception` is raised during `function` execution.
Returns the rescued exception, fails otherwise.
## Examples
assert_raise ArithmeticError, fn ->
1 + "test"
end
"""
def assert_raise(exception, function) when is_function(function) do
try do
function.()
flunk "Expected #{inspect exception} exception but nothing was raised"
rescue
error in [exception] -> error
error ->
name = error.__record__(:name)
if name in [ExUnit.AssertionError, ExUnit.ExpectationError] do
raise(error)
else
flunk "Expected exception #{inspect exception}, got #{inspect name} (#{error.message})"
end
end
end
@doc """
Asserts the `expected` and `received` are within `delta`.
## Examples
assert_in_delta 1.1, 1.5, 0.2
assert_in_delta 10, 15, 4
"""
def assert_in_delta(expected, received, delta, message // nil) do
diff = abs(expected - received)
message = message ||
"Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to be < #{inspect delta}"
assert diff < delta, message
end
@doc """
Asserts the given `expression` will throw a value.
Returns the thrown value or fails otherwise.
## Examples
assert catch_throw(throw 1) == 1
"""
defmacro catch_throw(expression) do
do_catch(:throw, expression)
end
@doc """
Asserts the given `expression` will exit.
Returns the exit status/message or fails otherwise.
## Examples
assert catch_exit(exit 1) == 1
"""
defmacro catch_exit(expression) do
do_catch(:exit, expression)
end
@doc """
Asserts the given `expression` will cause an error.
Returns the error or fails otherwise.
## Examples
assert catch_error(error 1) == 1
"""
defmacro catch_error(expression) do
do_catch(:error, expression)
end
defp do_catch(kind, expr) do
quote do
try do
unquote(expr)
flunk "Expected to catch #{unquote(kind)}, got nothing"
rescue
e in [ExUnit.AssertionError, ExUnit.ExpectationError] -> raise(e)
catch
unquote(kind), what_we_got -> what_we_got
end
end
end
@doc """
Asserts the `not_expected` value is nil or false.
In case it is a truthy value, raises the given message.
## Examples
refute true, "This will obviously fail"
"""
def refute(not_expected, message) do
not assert(!not_expected, message)
end
@doc """
Asserts a message was not received and won't be during
a timeout value.
The `not_expected` contents must be a match pattern.
## Examples
refute_receive :bye
Refute received with a explicit timeout:
refute_receive :bye, 1000
"""
defmacro refute_receive(not_expected, timeout // 100, message // nil) do
do_refute_receive(not_expected, timeout, message)
end
@doc """
Asserts a message was not received (i.e. it is not in the current process mailbox).
The `not_expected` contents must be a match pattern.
Timeout is set to 0, so there is no waiting time.
## Examples
self <- :hello
refute_received :bye
"""
defmacro refute_received(not_expected, message // nil) do
do_refute_receive(not_expected, 0, message)
end
defp do_refute_receive(not_expected, timeout, message) do
binary = Macro.to_binary(not_expected)
quote do
receive do
unquote(not_expected) = actual ->
flunk unquote(message) || "Expected to not have received message matching #{unquote binary}, got #{inspect actual}"
after
unquote(timeout) -> false
end
end
end
@doc """
Asserts the `expected` and `received` are not within `delta`.
## Examples
refute_in_delta 1.1, 1.2, 0.2
refute_in_delta 10, 11, 2
"""
def refute_in_delta(expected, received, delta, message // nil) do
diff = abs(expected - received)
message = message ||
"Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to not be < #{inspect delta}"
refute diff < delta, message
end
@doc """
Fails with a message.
## Examples
flunk "This should raise an error"
"""
@spec flunk :: no_return
@spec flunk(String.t) :: no_return
def flunk(message // "Epic Fail!") do
raise ExUnit.AssertionError, message: message
end
end
| 25.521385 | 125 | 0.642965 |
03cf9d055571a72dbeb6227b1f008f3e3a1bb008 | 3,810 | ex | Elixir | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/reported_error_event.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/reported_error_event.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/cloud_error_reporting/lib/google_api/cloud_error_reporting/v1beta1/model/reported_error_event.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudErrorReporting.V1beta1.Model.ReportedErrorEvent do
@moduledoc """
An error event which is reported to the Error Reporting system.
## Attributes
* `context` (*type:* `GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext.t`, *default:* `nil`) - Optional. A description of the context in which the error occurred.
* `eventTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. Time when the event occurred. If not provided, the time when the event was received by the Error Reporting system will be used.
* `message` (*type:* `String.t`, *default:* `nil`) - Required. The error message. If no `context.reportLocation` is provided, the message must contain a header (typically consisting of the exception type name and an error message) and an exception stack trace in one of the supported programming languages and formats. Supported languages are Java, Python, JavaScript, Ruby, C#, PHP, and Go. Supported stack trace formats are: * **Java**: Must be the return value of [`Throwable.printStackTrace()`](https://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html#printStackTrace%28%29). * **Python**: Must be the return value of [`traceback.format_exc()`](https://docs.python.org/2/library/traceback.html#traceback.format_exc). * **JavaScript**: Must be the value of [`error.stack`](https://github.com/v8/v8/wiki/Stack-Trace-API) as returned by V8. * **Ruby**: Must contain frames returned by [`Exception.backtrace`](https://ruby-doc.org/core-2.2.0/Exception.html#method-i-backtrace). * **C#**: Must be the return value of [`Exception.ToString()`](https://msdn.microsoft.com/en-us/library/system.exception.tostring.aspx). * **PHP**: Must start with `PHP (Notice|Parse error|Fatal error|Warning)` and contain the result of [`(string)$exception`](http://php.net/manual/en/exception.tostring.php). * **Go**: Must be the return value of [`runtime.Stack()`](https://golang.org/pkg/runtime/debug/#Stack).
* `serviceContext` (*type:* `GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext.t`, *default:* `nil`) - Required. The service context in which this error has occurred.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:context => GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext.t() | nil,
:eventTime => DateTime.t() | nil,
:message => String.t() | nil,
:serviceContext => GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext.t() | nil
}
field(:context, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ErrorContext)
field(:eventTime, as: DateTime)
field(:message)
field(:serviceContext, as: GoogleApi.CloudErrorReporting.V1beta1.Model.ServiceContext)
end
defimpl Poison.Decoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ReportedErrorEvent do
def decode(value, options) do
GoogleApi.CloudErrorReporting.V1beta1.Model.ReportedErrorEvent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudErrorReporting.V1beta1.Model.ReportedErrorEvent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 68.035714 | 1,405 | 0.74357 |
03cfb1b09ed1ae633835a7851b9d4b459a34eb98 | 620 | ex | Elixir | lib/receptar/translations/translation.ex | johannes-mueller/receptar | 6ab2cbf93c5da01409a9d8ed3d56a825e8350a1e | [
"Apache-2.0"
] | null | null | null | lib/receptar/translations/translation.ex | johannes-mueller/receptar | 6ab2cbf93c5da01409a9d8ed3d56a825e8350a1e | [
"Apache-2.0"
] | null | null | null | lib/receptar/translations/translation.ex | johannes-mueller/receptar | 6ab2cbf93c5da01409a9d8ed3d56a825e8350a1e | [
"Apache-2.0"
] | null | null | null | defmodule Receptar.Translations.Translation do
use Ecto.Schema
import Ecto.Changeset
schema "translations" do
field :content, :string
field :language, :string
belongs_to :substance, Receptar.Substances.Substance
belongs_to :recipe, Receptar.Recipes.Recipe
belongs_to :unit, Receptar.Units.Unit
belongs_to :instruction, Receptar.Instructions.Instruction
timestamps()
end
@doc false
def changeset(translation, attrs) do
translation
|> cast(attrs, [:content, :language, :substance_id, :instruction_id, :unit_id])
|> validate_required([:content, :language])
end
end
| 25.833333 | 83 | 0.732258 |
03cfb5d83c71cc296075ff7f4dddb8d978be420c | 5,229 | ex | Elixir | lib/farmbot/http/http.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | null | null | null | lib/farmbot/http/http.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | null | null | null | lib/farmbot/http/http.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 1 | 2020-12-16T16:39:32.000Z | 2020-12-16T16:39:32.000Z | defmodule Farmbot.HTTP do
@moduledoc "Wraps an HTTP Adapter."
# credo:disable-for-this-file Credo.Check.Refactor.FunctionArity
use GenServer
alias Farmbot.HTTP.{Adapter, Error, Response}
@adapter Application.get_env(:farmbot, :behaviour)[:http_adapter]
@adapter || raise("No http adapter.")
@typep method :: Adapter.method
@typep url :: Adapter.url
@typep body :: Adapter.body
@typep headers :: Adapter.headers
@typep opts :: Adapter.opts
@doc """
Make an http request. Will not raise.
* `method` - can be any http verb
* `url` - fully formatted url or an api slug.
* `body` - body can be any of:
* binary
* `{:multipart, [{binary_key, binary_value}]}`
* headers - `[{binary_key, binary_value}]`
* opts - Keyword opts to be passed to adapter (hackney/httpoison)
* `file` - option to be passed if the output should be saved to a file.
"""
@spec request(method, url, body, headers, opts)
:: {:ok, Response.t} | {:error, term}
def request(method, url, body \\ "", headers \\ [], opts \\ [])
def request(method, url, body, headers, opts) do
call = {:request, method, url, body, headers, opts}
GenServer.call(__MODULE__, call, :infinity)
end
@doc "Same as `request/5` but raises."
@spec request!(method, url, body, headers, opts) :: Response.t | no_return
def request!(method, url, body \\ "", headers \\ [], opts \\ [])
def request!(method, url, body, headers, opts) do
case request(method, url, body, headers, opts) do
{:ok, %Response{status_code: code} = resp}
when code > 199 and code < 300 -> resp
{:ok, %Response{} = resp} -> raise Error, resp
{:error, reason} -> raise Error, reason
end
end
@doc "HTTP GET request."
@spec get(url, headers, opts) :: {:ok, Response.t} | {:error, term}
def get(url, headers \\ [], opts \\ [])
def get(url, headers, opts) do
request(:get, url, "", headers, opts)
end
@doc "Same as `get/3` but raises."
@spec get!(url, headers, opts) :: Response.t | no_return
def get!(url, headers \\ [], opts \\ [])
def get!(url, headers, opts) do
request!(:get, url, "", headers, opts)
end
@doc "HTTP POST request."
@spec post(url, headers, opts) :: {:ok, Response.t} | {:error, term}
def post(url, body, headers \\ [], opts \\ [])
def post(url, body, headers, opts) do
request(:post, url, body, headers, opts)
end
@doc "Same as `post/4` but raises."
@spec post!(url, headers, opts) :: Response.t | no_return
def post!(url, body, headers \\ [], opts \\ [])
def post!(url, body, headers, opts) do
request!(:post, url, body, headers, opts)
end
def put(url, body, headers \\ [], opts \\ [])
def put(url, body, headers, opts) do
request(:put, url, body, headers, opts)
end
def put!(url, body, headers \\ [], opts \\ [])
def put!(url, body, headers, opts) do
request!(:put, url, body, headers, opts)
end
def delete(url, headers \\ [], opts \\ [])
def delete(url, headers, opts) do
request!(:delete, url, "", headers, opts)
end
def delete!(url, headers \\ [], opts \\ [])
def delete!(url, headers, opts) do
request!(:delete, url, "", headers, opts)
end
@doc "Download a file to the filesystem."
def download_file(url,
path,
progress_callback \\ nil,
payload \\ "",
headers \\ [],
stream_fun \\ nil)
def download_file(url, path, progress_callback, payload, hddrs, stream_fun) do
opts = {url, path, progress_callback, payload, hddrs, stream_fun}
call = {:download_file, opts}
GenServer.call(__MODULE__, call, :infinity)
end
@doc "Upload a file to FB storage."
def upload_file(path, meta \\ nil) do
if File.exists?(path) do
GenServer.call(__MODULE__, {:upload_file, {path, meta}}, :infinity)
else
{:error, "#{path} not found"}
end
end
@doc "Start HTTP Services."
def start_link do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
def init([]) do
{:ok, adapter} = @adapter.start_link()
Process.link(adapter)
{:ok, %{adapter: adapter}}
end
def handle_call({:request, _, _, _, _, _} = req, _from, state) do
{:request, method, url, body, headers, opts} = req
args = [state.adapter, method, url, body, headers, opts]
res = case apply(@adapter, :request, args) do
{:ok, %Response{}} = res -> res
{:error, _} = res -> res
end
{:reply, res, state}
end
def handle_call({:download_file, call}, _from, %{adapter: adapter} = state) do
{url, path, progress_callback, payload, headers, stream_fun} = call
args = [adapter, url, path, progress_callback, payload, headers, stream_fun]
res = case apply(@adapter, :download_file, args) do
{:ok, _} = res -> res
{:error, _} = res -> res
end
{:reply, res, state}
end
def handle_call({:upload_file, {path, meta}}, _from, state) do
meta_arg = meta || %{x: -1, y: -1, z: -1}
args = [state.adapter, path, meta_arg]
res = case apply(@adapter, :upload_file, args) do
{:ok, _} = res -> res
{:error, _} = res -> res
end
{:reply, res, state}
end
end
| 30.225434 | 80 | 0.603557 |
03cfc16e1b92953a4860350e82886e04e168a119 | 6,889 | ex | Elixir | lib/ex_machina.ex | sheharyarn/ex_machina | bca77b13636d3b6897434715b64c0aa33e04158b | [
"MIT"
] | null | null | null | lib/ex_machina.ex | sheharyarn/ex_machina | bca77b13636d3b6897434715b64c0aa33e04158b | [
"MIT"
] | null | null | null | lib/ex_machina.ex | sheharyarn/ex_machina | bca77b13636d3b6897434715b64c0aa33e04158b | [
"MIT"
] | null | null | null | defmodule ExMachina do
@moduledoc """
Defines functions for generating data
In depth examples are in the [README](readme.html)
"""
defmodule UndefinedFactoryError do
@moduledoc """
Error raised when trying to build or create a factory that is undefined.
"""
defexception [:message]
def exception(factory_name) do
message = """
No factory defined for #{inspect(factory_name)}.
Please check for typos or define your factory:
def #{factory_name}_factory do
...
end
"""
%UndefinedFactoryError{message: message}
end
end
use Application
@doc false
def start(_type, _args), do: ExMachina.Sequence.start_link()
defmacro __using__(_opts) do
quote do
@before_compile unquote(__MODULE__)
import ExMachina, only: [sequence: 1, sequence: 2]
def build(factory_name, attrs \\ %{}) do
ExMachina.build(__MODULE__, factory_name, attrs)
end
def build_pair(factory_name, attrs \\ %{}) do
ExMachina.build_pair(__MODULE__, factory_name, attrs)
end
def build_list(number_of_records, factory_name, attrs \\ %{}) do
ExMachina.build_list(__MODULE__, number_of_records, factory_name, attrs)
end
@spec create(any) :: no_return
def create(_) do
raise_function_replaced_error("create/1", "insert/1")
end
@spec create(any, any) :: no_return
def create(_, _) do
raise_function_replaced_error("create/2", "insert/2")
end
@spec create_pair(any, any) :: no_return
def create_pair(_, _) do
raise_function_replaced_error("create_pair/2", "insert_pair/2")
end
@spec create_list(any, any, any) :: no_return
def create_list(_, _, _) do
raise_function_replaced_error("create_list/3", "insert_list/3")
end
@spec raise_function_replaced_error(String.t(), String.t()) :: no_return
defp raise_function_replaced_error(old_function, new_function) do
raise """
#{old_function} has been removed.
If you are using ExMachina.Ecto, use #{new_function} instead.
If you are using ExMachina with a custom `save_record/2`, you now must use ExMachina.Strategy.
See the ExMachina.Strategy documentation for examples.
"""
end
defoverridable create: 1, create: 2, create_pair: 2, create_list: 3
end
end
@doc """
Shortcut for creating unique string values.
This is automatically imported into a model factory when you `use ExMachina`.
This is equivalent to `sequence(name, &"\#{name}\#{&1}")`. If you need to
customize the returned string, see `sequence/2`.
Note that sequences keep growing and are *not* reset by ExMachina. Most of the
time you won't need to reset the sequence, but when you do need to reset them,
you can use `ExMachina.Sequence.reset/0`.
## Examples
def user_factory do
%User{
# Will generate "username0" then "username1", etc.
username: sequence("username")
}
end
def article_factory do
%Article{
# Will generate "Article Title0" then "Article Title1", etc.
title: sequence("Article Title")
}
end
"""
# Delegates to the shared sequence agent, keyed by `name`.
@spec sequence(String.t()) :: String.t()
def sequence(name) do
  ExMachina.Sequence.next(name)
end
@doc """
Create sequences for generating unique values.
This is automatically imported into a model factory when you `use ExMachina`.
The `name` can be any term, although it is typically an atom describing the
sequence. Each time a sequence is called with the same `name`, its number is
incremented by one.
The `formatter` function takes the sequence number, and returns a sequential
representation of that number – typically a formatted string.
## Examples
def user_factory do
%{
# Will generate "me-0@foo.com" then "me-1@foo.com", etc.
email: sequence(:email, &"me-\#{&1}@foo.com"),
# Will generate "admin" then "user", "other", "admin" etc.
role: sequence(:role, ["admin", "user", "other"])
}
end
"""
# Delegates to the shared sequence agent; `formatter` is either a function
# of the sequence number or a list cycled through.
@spec sequence(any, (integer -> any) | nonempty_list) :: any
def sequence(name, formatter) do
  ExMachina.Sequence.next(name, formatter)
end
@doc """
Builds a single factory.
This will defer to the `[factory_name]_factory/0` callback defined in the
factory module in which it is `use`d.
## Example
def user_factory do
%{name: "John Doe", admin: false}
end
# Returns %{name: "John Doe", admin: false}
build(:user)
# Returns %{name: "John Doe", admin: true}
build(:user, admin: true)
"""
@callback build(factory_name :: atom) :: any
@callback build(factory_name :: atom, attrs :: keyword | map) :: any
@doc false
# Builds `factory_name` from `module`'s factory definitions.
#
# Supports factories defined either as `name_factory/1` (receives the attrs
# and is fully responsible for merging) or `name_factory/0` (attrs are merged
# into the result via `do_merge/2`). Raises `UndefinedFactoryError` when the
# module defines neither, or when the module cannot be loaded.
def build(module, factory_name, attrs \\ %{}) do
  attrs = Enum.into(attrs, %{})
  function_name = build_function_name(factory_name)
  # Hoisted so the module is checked/loaded once, not once per cond branch.
  loaded? = Code.ensure_loaded?(module)

  cond do
    loaded? and function_exported?(module, function_name, 1) ->
      apply(module, function_name, [attrs])

    loaded? and function_exported?(module, function_name, 0) ->
      module |> apply(function_name, []) |> do_merge(attrs)

    true ->
      raise UndefinedFactoryError, factory_name
  end
end
# Maps a factory name like `:user` to its callback name `:user_factory`.
defp build_function_name(factory_name) do
  :"#{factory_name}_factory"
end
# Merges caller-supplied attrs into a factory result. For structs,
# `struct!/2` is used so an unknown key raises instead of being silently
# added; plain maps take a plain `Map.merge/2`.
defp do_merge(%{__struct__: _} = record, attrs), do: struct!(record, attrs)
defp do_merge(record, attrs), do: Map.merge(record, attrs)
@doc """
Builds two factories.
This is just an alias for `build_list(2, factory_name, attrs)`.
## Example
# Returns a list of 2 users
build_pair(:user)
"""
@callback build_pair(factory_name :: atom) :: list
@callback build_pair(factory_name :: atom, attrs :: keyword | map) :: list
@doc false
# A pair is simply a two-element `build_list`.
def build_pair(module, factory_name, attrs \\ %{}),
  do: ExMachina.build_list(module, 2, factory_name, attrs)
@doc """
Builds any number of factories.
## Example
# Returns a list of 3 users
build_list(3, :user)
"""
@callback build_list(number_of_records :: integer, factory_name :: atom) :: list
@callback build_list(number_of_records :: integer, factory_name :: atom, attrs :: keyword | map) ::
list
@doc false
# Builds the factory `number_of_records` times. `Stream.repeatedly/1` plus
# `Enum.take/2` guarantees exactly `number_of_records` builds happen (and
# zero builds for 0), which matters because each build may advance shared
# sequence counters as a side effect.
def build_list(module, number_of_records, factory_name, attrs \\ %{}) do
  Stream.repeatedly(fn ->
    ExMachina.build(module, factory_name, attrs)
  end)
  |> Enum.take(number_of_records)
end
# Injected last into using modules: defines a catch-all `factory/1` that
# raises `UndefinedFactoryError` for any factory name.
# NOTE(review): `build/3` above dispatches on `name_factory/0,1` functions,
# not `factory/1`; this appears to be a fallback for an older calling
# convention — confirm before removing.
defmacro __before_compile__(_env) do
  quote do
    @doc "Raises a helpful error if no factory is defined."
    @spec factory(any) :: no_return
    def factory(factory_name), do: raise(UndefinedFactoryError, factory_name)
  end
end
end
| 28.466942 | 102 | 0.650457 |
03cfc6117c131ef9acfc1b58a614ae62d3554832 | 853 | exs | Elixir | mix.exs | brienw/flipex | 931e13a733306cf8d547ea1a3b4ac3351d88b511 | [
"MIT",
"Unlicense"
] | 1 | 2016-12-01T15:59:15.000Z | 2016-12-01T15:59:15.000Z | mix.exs | brienw/flipex | 931e13a733306cf8d547ea1a3b4ac3351d88b511 | [
"MIT",
"Unlicense"
] | null | null | null | mix.exs | brienw/flipex | 931e13a733306cf8d547ea1a3b4ac3351d88b511 | [
"MIT",
"Unlicense"
] | null | null | null | defmodule TextFlip.Mixfile do
use Mix.Project
# Mix project definition for the :flip_text Hex package.
def project do
  env = Mix.env()

  [
    app: :flip_text,
    version: "0.1.3",
    elixir: "~> 1.3",
    build_embedded: env == :prod,
    start_permanent: env == :prod,
    description: description(),
    package: package(),
    deps: deps()
  ]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
# OTP application configuration; only :logger is required at runtime.
def application, do: [applications: [:logger]]
# Dev/test-only dependencies; the library itself depends on nothing.
defp deps do
  docs = {:ex_doc, ">= 0.0.0", only: :dev}
  ci_reports = {:junit_formatter, "~> 2.0", only: :test}
  [docs, ci_reports]
end
# Hex package description. The text is deliberately upside-down — it reads
# "A simple text flipping library." — matching what the package does.
# Do not "fix" the characters; they are the published description.
defp description do
  """
  ˙ʎɹɐɹqıʃ ƃuıddıʃɟ ʇxǝʇ ǝʃdɯıs ∀
  """
end
# Hex package metadata.
defp package do
  links = %{"GitHub" => "https://github.com/brienw/flipex"}

  [
    name: "flip_text",
    maintainers: ["Brien Wankel"],
    licenses: ["MIT"],
    links: links
  ]
end
end
| 18.955556 | 62 | 0.570926 |
03cfcc023754eeef5b52b7c7a8e099ebcb387637 | 146 | ex | Elixir | lib/tock/application.ex | malomohq/tock | 8cfa71a7dfbe857a1476fcb1bfa27e642f3bb598 | [
"MIT"
] | null | null | null | lib/tock/application.ex | malomohq/tock | 8cfa71a7dfbe857a1476fcb1bfa27e642f3bb598 | [
"MIT"
] | null | null | null | lib/tock/application.ex | malomohq/tock | 8cfa71a7dfbe857a1476fcb1bfa27e642f3bb598 | [
"MIT"
] | null | null | null | defmodule Tock.Application do
@moduledoc false
use Application
@impl true
# Application callback: the whole app is the Tock.Supervisor tree.
def start(_type, _args), do: Tock.Supervisor.start_link()
end
| 16.222222 | 59 | 0.746575 |
03cfdb6bb9b10d1f84fa132942bc6196d3474acc | 37,751 | ex | Elixir | lib/ex_unit/lib/ex_unit/diff.ex | gianluca-nitti/elixir | 9c5bc2c2053f5956b5fd03de484ac8131ff32f25 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/diff.ex | gianluca-nitti/elixir | 9c5bc2c2053f5956b5fd03de484ac8131ff32f25 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/diff.ex | gianluca-nitti/elixir | 9c5bc2c2053f5956b5fd03de484ac8131ff32f25 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.Diff do
@moduledoc false
# A Diff struct and functions.
#
# The Diff struct contains the fields `:equivalent?`, `:left`, `:right`.
# The `:equivalent?` field represents if the `:left` and `:right` side are
# equivalents and contain no diffs. The `:left` and `:right` represent the sides
# of the comparison and contain ASTs with some special metas: `:diff` and
# `:diff_container`.
#
# When meta `:diff` is `true`, the AST inside of it has no equivalent on the
# other side and should be rendered in a different color. If the AST is a
# literal and doesn't contain meta, the `:diff` meta will be place in a wrapping
# block.
alias Code.Identifier
alias Inspect.Algebra
defstruct equivalent?: true,
left: nil,
right: nil
@doc """
Returns the diff between `left` and `right` and env after the comparison.
The `left` side can be a literal or an AST, the `right` should be always a
value. The `context` should be `{:match, pins}` for pattern matching and
`expr` for any other case.
"""
# `:expr` context: plain expression comparison; no pins are available.
def compute(left, right, :expr) do
  compare_quoted(left, right, %{pins: %{}, context: nil, current_vars: %{}})
end

# `{:match, pins}` context: pattern-match semantics; `pins` supplies the
# values bound outside the pattern, consumed by `^var` comparisons.
def compute(left, right, {:match, pins}) do
  compare_quoted(left, right, %{pins: Map.new(pins), context: :match, current_vars: %{}})
end
defp compare_quoted({:_, _, context} = left, right, env) when is_atom(context) do
diff_right = escape(right)
diff = %__MODULE__{equivalent?: true, left: left, right: diff_right}
{diff, env}
end
defp compare_quoted({:^, _, [{name, _, context}]} = left, right, env)
when is_atom(name) and is_atom(context) do
compare_pin(left, right, env)
end
defp compare_quoted({name, _, context} = left, right, env)
when is_atom(name) and is_atom(context) do
compare_var(left, right, env)
end
defp compare_quoted({:-, _, [number]}, right, env)
when is_number(number) and is_number(right) do
compare_quoted(-number, right, env)
end
defp compare_quoted({:+, _, [number]}, right, env)
when is_number(number) and is_number(right) do
compare_quoted(number, right, env)
end
defp compare_quoted(literal, literal, env)
when is_atom(literal) or is_number(literal) or is_reference(literal) or
is_pid(literal) or is_function(literal) do
diff = %__MODULE__{equivalent?: true, left: literal, right: literal}
{diff, env}
end
defp compare_quoted(left, right, env) when is_number(left) and is_number(right) do
compare_number(left, right, env)
end
defp compare_quoted({:++, _, _} = left, right, env) when is_list(right) do
compare_maybe_improper_list(left, right, env)
end
defp compare_quoted(left, right, env) when is_list(left) and is_list(right) do
compare_maybe_list(left, right, env)
end
defp compare_quoted({:{}, _, _} = left, right, env) when is_tuple(right) do
compare_tuple(left, right, env)
end
defp compare_quoted({_, _} = left, right, env) when is_tuple(right) do
compare_tuple(left, right, env)
end
defp compare_quoted(left, right, %{context: nil} = env)
when is_tuple(left) and is_tuple(right) do
compare_tuple(left, right, env)
end
defp compare_quoted({:%, _, [_, {:%{}, _, items}]} = left, %{} = right, env)
when is_list(items) do
compare_struct(left, right, env)
end
defp compare_quoted({:%{}, _, [{:__struct__, _} | _]} = left, %{} = right, env) do
compare_struct(left, right, env)
end
defp compare_quoted({:%{}, _, items} = left, %struct{} = right, env) when is_list(items) do
compare_map(left, right, nil, struct, env)
end
defp compare_quoted({:%{}, _, items} = left, %{} = right, env) when is_list(items) do
compare_map(left, right, nil, nil, env)
end
defp compare_quoted(%_{} = left, %{} = right, env) do
compare_struct(left, right, env)
end
defp compare_quoted(%{} = left, %{} = right, env) do
compare_map(left, right, nil, nil, env)
end
defp compare_quoted(left, right, env) when is_binary(left) and is_binary(right) do
compare_string(left, right, ?\", env)
end
defp compare_quoted({:<>, _, _} = left, right, env) when is_binary(right) do
compare_string_concat(left, right, env)
end
defp compare_quoted({_, [{:expanded, expanded} | _], _} = left, right, env) do
macro = Macro.update_meta(left, &Keyword.delete(&1, :expanded))
compare_macro(macro, expanded, right, env)
end
defp compare_quoted(left, right, %{context: :match} = env) do
diff_left = update_diff_meta(left, true)
diff_right = escape(right) |> update_diff_meta(true)
diff = %__MODULE__{equivalent?: false, left: diff_left, right: diff_right}
{diff, env}
end
defp compare_quoted(left, right, env) do
diff_left = escape(left) |> update_diff_meta(true)
diff_right = escape(right) |> update_diff_meta(true)
diff = %__MODULE__{equivalent?: false, left: diff_left, right: diff_right}
{diff, env}
end
# Macros
# Diffs the *expansion* of a macro call against `right`, but renders the
# original (unexpanded) macro AST on the left, marked when not equivalent.
defp compare_macro(macro, expanded, right, env) do
  {expansion_diff, env_after} = compare_quoted(expanded, right, env)
  marked_macro = update_diff_meta(macro, not expansion_diff.equivalent?)
  {%{expansion_diff | left: marked_macro}, env_after}
end
# Pins
# Resolves `^name` against `env.pins` (raises MatchError if the pin is
# missing) and diffs the pinned value, rendering the `^name` AST on the left.
defp compare_pin({:^, _, [{name, _, _}]} = pin, right, %{pins: pins} = env) do
  %{^name => pin_value} = pins
  {diff, post_env} = compare_quoted(pin_value, right, env)
  diff_left = update_diff_meta(pin, !diff.equivalent?)
  {%{diff | left: diff_left}, post_env}
end
# Vars
# Diffs a variable in a match. The first occurrence binds the variable to
# `right` in `env.current_vars`; later occurrences must carry the exact
# same value (so e.g. `{x, x}` matched against `{1, 2}` is a conflict).
defp compare_var({name, meta, context} = left, right, env) do
  # Variables are identified by name plus hygiene counter (falling back to
  # the context when no counter is present in the meta).
  identifier = {name, meta[:counter] || context}

  case env.current_vars do
    # Already bound to the same value: equivalent.
    %{^identifier => ^right} ->
      diff_right = escape(right)
      diff = %__MODULE__{equivalent?: true, left: left, right: diff_right}
      {diff, env}

    # Bound to a different value: conflict — mark both sides.
    %{^identifier => _} ->
      diff_left = update_diff_meta(left, true)
      diff_right = escape(right) |> update_diff_meta(true)
      diff = %__MODULE__{equivalent?: false, left: diff_left, right: diff_right}
      {diff, env}

    # Unbound: bind and report as equivalent.
    current_vars = %{} ->
      updated_vars = Map.put(current_vars, identifier, right)
      diff_right = escape(right)
      diff = %__MODULE__{equivalent?: true, left: left, right: diff_right}
      {diff, %{env | current_vars: updated_vars}}
  end
end
# Tuples
# In a match context the left side is the quoted n-tuple form
# `{:{}, _, elements}`; otherwise it is an actual tuple value.
defp compare_tuple({:{}, _, left_list}, right, %{context: :match} = env) do
  compare_tuple_items(left_list, Tuple.to_list(right), env)
end

defp compare_tuple(left, right, env) do
  compare_tuple_items(Tuple.to_list(left), Tuple.to_list(right), env)
end

# Pairs elements up by index, then folds whatever is left over on either
# side (size mismatch) into a trailing diff.
defp compare_tuple_items(list_left, list_right, env) do
  {compared, non_compared_left, non_compared_right, post_env} =
    compare_tuple_items_by_index(list_left, list_right, env)

  remaining_diff = compare_tuple_remaining_items(non_compared_left, non_compared_right)
  {build_tuple_result(compared, remaining_diff), post_env}
end
defp compare_tuple_items_by_index(list_left, list_right, env) do
{compared, non_compared_left, non_compared_right, post_env} =
Enum.reduce(list_left, {[], [], list_right, env}, fn
item, {compared, non_compared, [next | continue], acc_env} ->
{diff, diff_post_env} = compare_quoted(item, next, acc_env)
{[diff | compared], non_compared, continue, diff_post_env}
item, {compared, non_compared, [], acc_env} ->
{compared, [item | non_compared], [], acc_env}
end)
{
Enum.reverse(compared),
Enum.reverse(non_compared_left),
Enum.reverse(non_compared_right),
post_env
}
end
# Wraps the unpaired tuple elements of each side in a tuple AST, marking
# every leftover element as a diff. Both sides empty means the tuples had
# equal size and the remainder is equivalent.
defp compare_tuple_remaining_items([], []) do
  empty_tuple = {:{}, [], []}
  %__MODULE__{equivalent?: true, left: empty_tuple, right: empty_tuple}
end

defp compare_tuple_remaining_items(left_rest, right_rest) do
  mark_all = fn items -> Enum.map(items, &update_diff_meta(&1, true)) end

  %__MODULE__{
    equivalent?: false,
    left: {:{}, [], mark_all.(left_rest)},
    right: {:{}, [], mark_all.(right_rest)}
  }
end
# Prepends each per-index diff onto the remaining (size-mismatch) diff,
# producing one combined tuple diff.
defp build_tuple_result([], remaining_diff) do
  remaining_diff
end

defp build_tuple_result([head | tail], remaining_diff) do
  tail_result = build_tuple_result(tail, remaining_diff)
  prepend_diff(head, tail_result)
end
# Lists
defp compare_maybe_list(left, right, env) do
if List.ascii_printable?(left) and List.ascii_printable?(right) do
compare_string(List.to_string(left), List.to_string(right), ?', env)
else
compare_maybe_improper_list(left, right, env)
end
end
# Compare two lists, removing all the operators (`|` and `++`) before and
# adding them back in the end. When the `left` contains a improper element
# it will extract forcefully a improper element on the `right` for matching
# purposes.
defp compare_maybe_improper_list(left, right, env) do
{parsed_left, improper_left, operators_left, length_left} = parse_list(left, 0)
{parsed_right, improper_right, operators_right, _} = parse_list(right, 0)
{parsed_right, improper_right, split?} =
split_list(parsed_right, length_left, improper_right, improper_left)
{parsed_diff, parsed_post_env} = myers_difference_list(parsed_left, parsed_right, env)
{improper_diff, improper_post_env, improper_diff?} =
compare_improper(improper_left, improper_right, parsed_post_env, split?)
diff =
merge_diff(parsed_diff, improper_diff, fn left1, left2, right1, right2 ->
left = rebuild_list(left1, left2, operators_left, improper_diff?)
right =
if split? do
rebuild_split_lists(right1, right2)
else
rebuild_list(right1, right2, operators_right, improper_diff?)
end
{left, right}
end)
{diff, improper_post_env}
end
defp compare_improper({:element, left}, {:element, right}, env, split?) do
{diff, post_env} = compare_quoted(left, right, env)
{diff, post_env, split?}
end
defp compare_improper({:element, left}, :empty, env, _split?) do
diff_left = update_diff_meta(left, true)
diff = %__MODULE__{equivalent?: false, left: diff_left}
{diff, env, true}
end
defp compare_improper(:empty, {:element, right}, env, _split?) do
diff_right = escape(right) |> update_diff_meta(true)
diff = %__MODULE__{equivalent?: false, right: diff_right}
{diff, env, true}
end
defp compare_improper(:empty, :empty, env, _split?) do
diff = %__MODULE__{equivalent?: true}
{diff, env, false}
end
defp parse_list([], _index) do
{[], :empty, nil, 0}
end
defp parse_list({:++, _, [left, right]}, _index) do
{parsed_left, :empty, operators_left, length_left} = parse_list(left, 0)
case parse_list(right, 0) do
{:improper, improper} ->
operators = {:++, length_left, [operators_left]}
{parsed_left, {:element, improper}, operators, length_left}
{parsed_right, improper_right, operators_right, length_right} ->
operators = {:++, length_left, [operators_left, operators_right]}
length = length_right + length_left
{parsed_left ++ parsed_right, improper_right, operators, length}
end
end
defp parse_list([{:|, _, [head, tail]}], index) do
case parse_list(tail, 0) do
{:improper, improper} ->
operator = {:|, index, []}
{[head], {:element, improper}, operator, 1}
{parsed_tail, improper_tail, operators_tail, length_tail} ->
operators = {:|, index, [operators_tail]}
{[head | parsed_tail], improper_tail, operators, length_tail + 1}
end
end
defp parse_list([head | tail], index) do
case parse_list(tail, index + 1) do
{:improper, improper} ->
operator = {:|, index, []}
{[head], {:element, improper}, operator, 1}
{parsed_tail, improper_tail, operators_tail, length_tail} ->
{[head | parsed_tail], improper_tail, operators_tail, length_tail + 1}
end
end
defp parse_list(element, _index) do
{:improper, element}
end
defp rebuild_list(list, _improper = nil, _operators = nil, _improper_diff?) do
list
end
defp rebuild_list(list, improper, {:|, index, []}, _improper_diff?) do
{left, [head]} = Enum.split(list, index)
left ++ [{:|, [], [head, improper]}]
end
defp rebuild_list(list, improper, {:|, index, [operators]}, improper_diff?) do
{left, [head | tail]} = Enum.split(list, index)
rebuilt_tail = rebuild_list(tail, improper, operators, improper_diff?)
rebuilt_tail =
if is_nil(operators) do
update_diff_meta(rebuilt_tail, improper_diff?)
else
rebuilt_tail
end
left ++ [{:|, [], [head, rebuilt_tail]}]
end
defp rebuild_list(list, improper, {:++, _index, [operators]}, _improper_diff?) do
rebuilt_list = rebuild_list(list, nil, operators, false)
{:++, [], [rebuilt_list, improper]}
end
defp rebuild_list(list, improper, {:++, index, operators}, improper_diff?) do
[operators_left, operators_right] = operators
{left, right} = Enum.split(list, index)
rebuilt_left = rebuild_list(left, nil, operators_left, false)
rebuilt_right = rebuild_list(right, improper, operators_right, improper_diff?)
rebuilt_right =
if is_nil(operators) do
update_diff_meta(rebuilt_right, improper_diff?)
else
rebuilt_right
end
{:++, [], [rebuilt_left, rebuilt_right]}
end
defp split_list(list, index, :empty, {:element, _element}) do
case Enum.split(list, index) do
{left, []} -> {left, :empty, false}
{left, right} -> {left, {:element, right}, true}
end
end
defp split_list(list, _index, improper, _improper_left) do
{list, improper, false}
end
defp rebuild_split_lists(left, right) do
updated_right =
case extract_diff_meta(right) do
{list, true} -> Enum.map(list, &update_diff_meta(&1, true))
{list, false} -> list
end
left ++ updated_right
end
defp myers_difference_list(left, right, env) do
path = {0, left, right, {[], [], env}}
find_diff(0, length(left) + length(right), [path])
end
defp find_diff(envelope, max, paths) do
case each_diagonal(-envelope, envelope, paths, []) do
{:done, {edit1, edit2, env}} ->
list_script_to_result(Enum.reverse(edit1), Enum.reverse(edit2), env)
{:next, paths} ->
find_diff(envelope + 1, max, paths)
end
end
defp each_diagonal(diag, limit, _paths, next_paths) when diag > limit do
{:next, Enum.reverse(next_paths)}
end
defp each_diagonal(diag, limit, paths, next_paths) do
{path, rest} = proceed_path(diag, limit, paths)
case follow_snake(path) do
{:cont, path} -> each_diagonal(diag + 2, limit, rest, [path | next_paths])
{:done, edits} -> {:done, edits}
end
end
defp proceed_path(0, 0, [path]), do: {path, []}
defp proceed_path(diag, limit, [path | _] = paths) when diag == -limit do
{move_down(path), paths}
end
defp proceed_path(diag, limit, [path]) when diag == limit do
{move_right(path), []}
end
defp proceed_path(_diag, _limit, [path1, path2 | rest]) do
if elem(path1, 0) > elem(path2, 0) do
{move_right(path1), [path2 | rest]}
else
{move_down(path2), [path2 | rest]}
end
end
defp move_right({y, list1, [elem2 | rest2], {edit1, edit2, env}}) do
{y, list1, rest2, {edit1, [{:ins, elem2} | edit2], env}}
end
defp move_right({y, list1, [], edits}) do
{y, list1, [], edits}
end
defp move_down({y, [elem1 | rest1], list2, {edit1, edit2, env}}) do
{y + 1, rest1, list2, {[{:del, elem1} | edit1], edit2, env}}
end
defp move_down({y, [], list2, edits}) do
{y + 1, [], list2, edits}
end
defp follow_snake({y, [elem1 | rest1], [elem2 | rest2], {edit1, edit2, env}} = path) do
{diff, post_env} = compare_quoted(elem1, elem2, env)
if diff.equivalent? do
new_edit1 = [{:eq, diff.left} | edit1]
new_edit2 = [{:eq, diff.right} | edit2]
follow_snake({y + 1, rest1, rest2, {new_edit1, new_edit2, post_env}})
else
{:cont, path}
end
end
defp follow_snake({_y, [], [], edits}) do
{:done, edits}
end
defp follow_snake(path) do
{:cont, path}
end
defp list_script_to_result([], [], env) do
diff = %__MODULE__{equivalent?: true, left: [], right: []}
{diff, env}
end
defp list_script_to_result([{:del, elem1} | rest1], [{:ins, elem2} | rest2], env) do
{elem_diff, elem_post_env} = compare_quoted(elem1, elem2, env)
{rest_diff, rest_post_env} = list_script_to_result(rest1, rest2, elem_post_env)
{prepend_diff(elem_diff, rest_diff), rest_post_env}
end
defp list_script_to_result([{:del, elem1} | rest1], list2, env) do
diff_left = update_diff_meta(elem1, true)
elem_diff = %__MODULE__{equivalent?: false, left: diff_left}
{rest_diff, rest_post_env} = list_script_to_result(rest1, list2, env)
{prepend_diff(elem_diff, rest_diff), rest_post_env}
end
defp list_script_to_result(list1, [{:ins, elem2} | rest2], env) do
diff_right = escape(elem2) |> update_diff_meta(true)
elem_diff = %__MODULE__{equivalent?: false, right: diff_right}
{rest_diff, rest_post_env} = list_script_to_result(list1, rest2, env)
{prepend_diff(elem_diff, rest_diff), rest_post_env}
end
defp list_script_to_result([{:eq, elem1} | rest1], [{:eq, elem2} | rest2], env) do
elem_diff = %__MODULE__{equivalent?: true, left: elem1, right: elem2}
{rest_diff, rest_post_env} = list_script_to_result(rest1, rest2, env)
{prepend_diff(elem_diff, rest_diff), rest_post_env}
end
# Maps
defp compare_map(%{} = left, right, struct1, struct2, env) do
compare_map_items(left, right, struct1, struct2, env)
end
defp compare_map({:%{}, _, items}, right, struct1, struct2, env) do
compare_map_items(items, right, struct1, struct2, env)
end
# Compare items based on the keys of `left_items` and add the `:diff` meta to
# the element that it wasn't able to compare.
defp compare_map_items(left_items, right, struct1, struct2, env) do
{non_comparable_by_key, remaining, compared, struct1, by_key_post_env} =
compare_map_items_by_key(left_items, right, struct1, env)
remaining_diff = compare_map_remaining_pairs(non_comparable_by_key, remaining, env)
struct_diff = build_struct_result(struct1, struct2)
map_diff = build_map_result(compared, remaining_diff)
{prepend_diff(struct_diff, map_diff), by_key_post_env}
end
defp compare_map_items_by_key(items, right, defined_struct, env) do
{non_comparable, remaining, compared, items_struct, post_env} =
Enum.reduce(items, {[], right, [], nil, env}, fn
{:__struct__, name}, acc ->
put_elem(acc, 3, name)
{key, _} = item, {non_comparable, remaining, compared, struct, acc_env} ->
literal_key = literal_key(key, env)
case Map.pop(remaining, literal_key) do
{nil, ^remaining} ->
{[item | non_comparable], remaining, [nil | compared], struct, acc_env}
{popped, new_remaining} ->
{diff, diff_post_env} = compare_map_pair(item, {literal_key, popped}, acc_env)
{non_comparable, new_remaining, [diff | compared], struct, diff_post_env}
end
end)
non_comparable = Enum.reverse(non_comparable)
remaining = Map.delete(remaining, :__struct__)
compared = Enum.reverse(compared)
defined_struct =
case defined_struct || items_struct do
{_, [{:expanded, name} | _], _} -> name
other -> other
end
{non_comparable, remaining, compared, defined_struct, post_env}
end
defp compare_map_pair({key1, value1}, {key2, value2}, env) do
{diff, post_env} = compare_quoted(value1, value2, env)
diff_left = {key1, diff.left}
diff_right = {key2, diff.right}
{%{diff | left: diff_left, right: diff_right}, post_env}
end
defp literal_key({:^, _, [{name, _, _}]}, %{pins: pins}) do
%{^name => pin_value} = pins
pin_value
end
defp literal_key(literal, _env) do
literal
end
# Can't compare using `myers_difference_list` because if key and value are
# equivalent, it gives strange results. It just mark them as different
# depending on the context, if `:match` only left side, otherwise both sides.
defp compare_map_remaining_pairs(remaining, right, env) do
list_left = Enum.map(remaining, &update_diff_meta(&1, true))
list_right =
if env.context == :match do
Map.to_list(right)
else
Enum.map(right, &update_diff_meta(&1, true))
end
diff_left = {:%{}, [], list_left}
diff_right = {:%{}, [], list_right}
equivalent? =
if env.context == :match do
remaining == []
else
remaining == [] && right == %{}
end
%__MODULE__{equivalent?: equivalent?, left: diff_left, right: diff_right}
end
defp build_map_result([], remaining_diff) do
remaining_diff
end
defp build_map_result([nil | tail], remaining_diff) do
{popped, new_remaining_diff} = pop_diff(remaining_diff)
tail_result = build_map_result(tail, new_remaining_diff)
prepend_diff(popped, tail_result)
end
defp build_map_result([head | tail], remaining_diff) do
tail_result = build_map_result(tail, remaining_diff)
prepend_diff(head, tail_result)
end
# Structs
defp compare_struct({:%, _, [struct1, left_map]}, %struct2{} = right, env) do
compare_struct(left_map, Map.from_struct(right), struct1, struct2, env)
end
defp compare_struct({:%, _, [struct1, left_map]}, right, env) do
compare_struct(left_map, right, struct1, nil, env)
end
defp compare_struct({:%{}, _, [{:__struct__, struct1} | left_items]}, %struct2{} = right, env) do
compare_struct({:%{}, [], left_items}, right, struct1, struct2, env)
end
defp compare_struct(%struct1{} = left, %struct2{} = right, env) do
compare_struct(left, right, struct1, struct2, env)
end
defp compare_struct(%struct1{} = left, right, env) do
compare_struct(left, right, struct1, nil, env)
end
defp compare_struct(left, %struct2{} = right, env) do
compare_map(left, right, nil, struct2, env)
end
defp compare_struct(left, right, env) do
compare_map(left, right, nil, nil, env)
end
defp compare_struct(%{} = left, right, struct1, struct2, env) do
if Inspect.impl_for(left) not in [Inspect.Any, Inspect.Map] do
inspect_left = inspect(left)
inspect_right = inspect(right)
if inspect_left != inspect_right do
compare_string(inspect_left, inspect_right, ?\", env)
else
compare_map(Map.from_struct(left), right, struct1, struct2, env)
end
else
compare_map(Map.from_struct(left), right, struct1, struct2, env)
end
end
defp compare_struct(left_map, right, struct1, struct2, env) do
try do
pid = self()
ExUnit.CaptureIO.capture_io(:stderr, fn ->
{items, _} = Code.eval_quoted(left_map)
send(pid, {:struct, struct(struct1, items)})
end)
receive do
{:struct, left} -> compare_struct(left, right, struct1, struct2, env)
end
rescue
_ -> compare_map(left_map, right, struct1, struct2, env)
end
end
defp build_struct_result(nil, nil) do
%__MODULE__{equivalent?: true}
end
defp build_struct_result(struct1, nil) do
diff_left = update_diff_meta({:__struct__, struct1}, true)
%__MODULE__{equivalent?: true, left: diff_left}
end
defp build_struct_result(nil, struct2) do
diff_right = update_diff_meta({:__struct__, struct2}, true)
%__MODULE__{equivalent?: true, right: diff_right}
end
defp build_struct_result(struct, struct) do
struct_pair = {:__struct__, struct}
%__MODULE__{equivalent?: true, left: struct_pair, right: struct_pair}
end
defp build_struct_result(struct1, struct2) do
diff_left = update_diff_meta({:__struct__, struct1}, true)
diff_right = update_diff_meta({:__struct__, struct2}, true)
%__MODULE__{equivalent?: true, left: diff_left, right: diff_right}
end
# Strings
defp compare_string(left, right, delimiter, env) do
diff =
cond do
diff_string?(left, right) ->
{escaped_left, _} = Code.Identifier.escape(left, delimiter)
{escaped_right, _} = Code.Identifier.escape(right, delimiter)
left = IO.iodata_to_binary(escaped_left)
right = IO.iodata_to_binary(escaped_right)
String.myers_difference(left, right) |> string_script_to_diff(delimiter)
left == right ->
string_script_to_diff([eq: left], delimiter)
true ->
string_script_to_diff([del: left, ins: right], delimiter)
end
{diff, env}
end
# Concat all the literals on `left` and split `right` based on the size of
# that, comparing them and the remaining AST from `left` and the remaining
# string from `right`.
defp compare_string_concat(left, right, env) do
{parsed_left, quoted, indexes, parsed_left_length} = parse_string(left)
compare_string_concat(parsed_left, quoted, indexes, parsed_left_length, right, env)
end
defp compare_string_concat(left, nil, indexes, _left_length, right, env) do
{parsed_diff, parsed_post_env} = compare_string(left, right, ?\", env)
left_diff = rebuild_concat_string(parsed_diff.left, nil, indexes)
diff = %__MODULE__{parsed_diff | left: left_diff}
{diff, parsed_post_env}
end
defp compare_string_concat(left, quoted, indexes, left_length, right, env) do
{parsed_right, continue_right} = String.split_at(right, left_length)
{parsed_diff, parsed_post_env} = compare_string(left, parsed_right, ?\", env)
{quoted_diff, quoted_post_env} = compare_quoted(quoted, continue_right, parsed_post_env)
diff =
merge_diff(parsed_diff, quoted_diff, fn left1, left2, right1, right2 ->
new_left = rebuild_concat_string(left1, left2, indexes)
new_right = rebuild_split_strings(right1, right2)
{new_left, new_right}
end)
{diff, quoted_post_env}
end
# Heuristic: only worth producing a character-level Myers diff when the two
# strings share more than 40% of their characters (bag distance); otherwise
# the caller renders them as a plain delete/insert.
defp diff_string?(left, right) do
  String.bag_distance(left, right) > 0.4
end
# Flattens a `"lit" <> rest` AST into:
# {concatenated literal prefix, trailing non-literal AST (or nil),
#  per-segment grapheme lengths, total prefix grapheme length}.
defp parse_string({:<>, _, [literal, rest]}) do
  {parsed, quoted, indexes, parsed_length} = parse_string(rest)
  literal_length = String.length(literal)
  length = literal_length + parsed_length
  {literal <> parsed, quoted, [literal_length | indexes], length}
end

# A bare binary ends the chain with no trailing pattern.
defp parse_string(literal) when is_binary(literal) do
  {literal, nil, [], String.length(literal)}
end

# Anything else (e.g. a variable) is a trailing pattern with no literal
# prefix at all.
defp parse_string(pattern) do
  {"", pattern, [], 0}
end
defp rebuild_split_strings(left, "") do
left
end
defp rebuild_split_strings({:__block__, meta, left_list}, {:__block__, _, right_list}) do
{:__block__, meta, left_list ++ right_list}
end
defp rebuild_split_strings({:__block__, meta, left_list}, right) do
{:__block__, meta, left_list ++ [right]}
end
defp rebuild_concat_string(literal, nil, []) do
literal
end
defp rebuild_concat_string(_literal, quoted, []) do
quoted
end
defp rebuild_concat_string(literal, quoted, [index | rest]) do
{next, continue} = next_concat_result(literal, index)
rebuilt_right = rebuild_concat_string(continue, quoted, rest)
{:<>, [], [next, rebuilt_right]}
end
defp next_concat_result({:__block__, [{:diff_container, _} | _] = meta, list}, index) do
{next, continue} = next_concat_result(list, index)
{{:__block__, meta, next}, {:__block__, meta, continue}}
end
defp next_concat_result([head | tail], index) do
{string, diff_meta?} = extract_diff_meta(head)
length = String.length(string)
cond do
length > index ->
{next, continue} = String.split_at(string, index)
next = [update_diff_meta(next, diff_meta?)]
continue = [update_diff_meta(continue, diff_meta?) | tail]
{next, continue}
length < index ->
{next, continue} = next_concat_result(tail, index - length)
{[head | next], continue}
true ->
{[head], tail}
end
end
defp string_script_to_diff([], delimiter) do
container = {:__block__, [diff_container: delimiter], []}
%__MODULE__{equivalent?: true, left: container, right: container}
end
defp string_script_to_diff([{:eq, string} | tail], delimiter) do
head_diff = %__MODULE__{equivalent?: true, left: string, right: string}
tail_diff = string_script_to_diff(tail, delimiter)
prepend_diff(head_diff, tail_diff)
end
defp string_script_to_diff([{:del, string} | tail], delimiter) do
left = update_diff_meta(string, true)
head_diff = %__MODULE__{equivalent?: false, left: left, right: nil}
tail_diff = string_script_to_diff(tail, delimiter)
prepend_diff(head_diff, tail_diff)
end
defp string_script_to_diff([{:ins, string} | tail], delimiter) do
right = update_diff_meta(string, true)
head_diff = %__MODULE__{equivalent?: false, left: nil, right: right}
tail_diff = string_script_to_diff(tail, delimiter)
prepend_diff(head_diff, tail_diff)
end
# Strips the :diff_container marker from a string container block so no
# quote delimiter is rendered for it (used for numbers).
defp remove_diff_container_meta({:__block__, meta, contents}) do
  updated_meta = Keyword.delete(meta, :diff_container)
  {:__block__, updated_meta, contents}
end
# Numbers
# Numbers are diffed character-wise via their `inspect/1` text; the string
# container meta is then dropped so no quote delimiters are rendered
# around the digits.
defp compare_number(left, right, env) do
  {diff, post_env} = compare_string(inspect(left), inspect(right), ?\", env)
  diff_left = remove_diff_container_meta(diff.left)
  diff_right = remove_diff_container_meta(diff.right)
  {%{diff | left: diff_left, right: diff_right}, post_env}
end
  # Algebra
  @doc """
  Converts a diff to an algebra document.
  """
  def to_algebra(quoted, diff_wrapper) do
    wrap_on_diff(quoted, &safe_to_algebra/2, diff_wrapper)
  end
  # Renders one quoted diff node as an algebra doc. Clause order matters:
  # specific AST shapes must be matched before the generic 3-tuple fallback.
  # String container block: concatenate the segments, wrapping them in the
  # container's delimiter (e.g. quotes) when one is present in the metadata.
  defp safe_to_algebra({:__block__, meta, list}, diff_wrapper) do
    content_docs = Enum.map(list, &string_to_algebra(&1, diff_wrapper))
    if container = meta[:diff_container] do
      delimiter = to_string([container])
      Algebra.concat([delimiter] ++ content_docs ++ [delimiter])
    else
      Algebra.concat(content_docs)
    end
  end
  # Lists render as `[...]`; item renderer depends on keyword-ness.
  defp safe_to_algebra(list, diff_wrapper) when is_list(list) do
    container_to_algebra("[", list, "]", diff_wrapper, select_list_item_algebra(list))
  end
  # Binary operators that appear in diffs: `<>`, `++` and cons `|`.
  defp safe_to_algebra({op, _, [left, right]}, diff_wrapper) when op in [:<>, :++, :|] do
    binary_op_to_algebra(left, " #{op} ", right, diff_wrapper)
  end
  # N-ary tuple AST (`{:{}, _, args}`) and literal 2-tuples render as `{...}`.
  defp safe_to_algebra({:{}, _, args}, diff_wrapper) do
    container_to_algebra("{", args, "}", diff_wrapper, &to_algebra/2)
  end
  defp safe_to_algebra({a, b}, diff_wrapper) do
    container_to_algebra("{", [a, b], "}", diff_wrapper, &to_algebra/2)
  end
  # Maps and structs: if the first pair is a `:__struct__` entry, pull it out
  # and render a `%Name{...}` prefix (marked when the name itself differs).
  defp safe_to_algebra({:%{}, _, [head | tail]}, diff_wrapper) do
    {struct, list} =
      case extract_diff_meta(head) do
        {{:__struct__, name}, true} -> {update_diff_meta(name, true), tail}
        {{:__struct__, name}, false} -> {name, tail}
        _other -> {nil, [head | tail]}
      end
    open =
      if struct do
        Algebra.concat(["%", struct_to_algebra(struct, diff_wrapper), "{"])
      else
        "%{"
      end
    container_to_algebra(open, list, "}", diff_wrapper, select_map_item_to_algebra(list))
  end
  # Any other quoted expression falls back to `Macro.to_string/1`.
  defp safe_to_algebra({_, _, _} = quoted, _diff_wrapper) do
    Macro.to_string(quoted)
  end
  # Plain literals are simply inspected.
  defp safe_to_algebra(literal, _diff_wrapper) do
    inspect(literal)
  end
  # Renders a string segment, applying the diff wrapper when the segment is
  # marked as different.
  def string_to_algebra(quoted, diff_wrapper) do
    wrap_on_diff(quoted, &safe_string_to_algebra/2, diff_wrapper)
  end
  # String segments are already plain binaries — return them as-is.
  def safe_string_to_algebra(literal, _diff_wrapper) do
    literal
  end
  # Renders a keyword pair (`key: value`), applying the diff wrapper on the
  # whole pair when it is marked.
  defp keyword_to_algebra(quoted, diff_wrapper) do
    wrap_on_diff(quoted, &safe_keyword_to_algebra/2, diff_wrapper)
  end
  # A pair may arrive as escaped tuple AST; normalize it to a 2-tuple first.
  defp safe_keyword_to_algebra({:{}, _, [key, value]}, diff_wrapper) do
    keyword_to_algebra({key, value}, diff_wrapper)
  end
  defp safe_keyword_to_algebra({key, value}, diff_wrapper) do
    key_doc = key_to_algebra(key, diff_wrapper)
    value_doc = to_algebra(value, diff_wrapper) |> Algebra.nest(:cursor)
    key_doc
    |> Algebra.glue(" ", value_doc)
    |> Algebra.group()
  end
  # Renders a keyword key, honoring its own diff marking.
  defp key_to_algebra(quoted, diff_wrapper) do
    wrap_on_diff(quoted, &safe_key_to_algebra/2, diff_wrapper)
  end
  defp safe_key_to_algebra(key, _diff_wrapper) do
    Identifier.inspect_as_key(key)
  end
  # Renders a map entry as `key => value`, applying the diff wrapper when
  # the whole entry is marked as different.
  defp map_item_to_algebra(quoted, diff_wrapper) do
    wrap_on_diff(quoted, &safe_map_item_to_algebra/2, diff_wrapper)
  end
  # Entries may arrive as escaped tuple AST; normalize to a 2-tuple.
  defp safe_map_item_to_algebra({:{}, _, [key, value]}, diff_wrapper) do
    safe_map_item_to_algebra({key, value}, diff_wrapper)
  end
  defp safe_map_item_to_algebra({key, value}, diff_wrapper) do
    key_doc = to_algebra(key, diff_wrapper)
    value_doc = to_algebra(value, diff_wrapper) |> Algebra.nest(:cursor)
    key_doc
    |> Algebra.glue(" => ", value_doc)
    |> Algebra.group()
  end
  # Renders `left <op> right` (e.g. " <> ", " ++ ", " | ") as a grouped doc,
  # nesting the right side at the cursor column for nicer line breaks.
  defp binary_op_to_algebra(left, op, right, diff_wrapper) do
    left_doc = to_algebra(left, diff_wrapper)
    right_doc = to_algebra(right, diff_wrapper) |> Algebra.nest(:cursor)
    left_doc
    |> Algebra.glue(op, right_doc)
    |> Algebra.group()
  end
  # Renders a delimited container (`[...]`, `{...}`, `%{...}`): each item is
  # rendered by `item_to_algebra`, items are joined with commas, and the
  # whole thing is nested by one column and grouped.
  defp container_to_algebra(open, list, close, diff_wrapper, item_to_algebra) do
    docs =
      list
      |> Enum.map(&item_to_algebra.(&1, diff_wrapper))
      |> Algebra.fold_doc(&join_docs/2)
      |> Algebra.nest(1)
    [open, docs, close]
    |> Algebra.concat()
    |> Algebra.group()
  end
  # Joins two item docs with a comma and a flexible break point.
  defp join_docs(doc1, doc2) do
    doc1
    |> Algebra.concat(",")
    |> Algebra.flex_glue(doc2)
  end
defp struct_to_algebra(quoted, diff_wrapper) do
wrap_on_diff(quoted, &safe_struct_to_algebra/2, diff_wrapper)
end
defp safe_struct_to_algebra(name, _diff_wrapper) do
Code.Identifier.inspect_as_atom(name)
end
  # Picks the item renderer for a list: keyword syntax (`key: value`) when
  # every element is a keyword pair, otherwise the generic renderer.
  defp select_list_item_algebra(list) do
    short? = Enum.all?(list, &keyword?/1)
    if short?, do: &keyword_to_algebra/2, else: &to_algebra/2
  end
  # Same selection for map entries: keyword syntax vs. `key => value`.
  defp select_map_item_to_algebra(list) do
    short? = Enum.all?(list, &keyword?/1)
    if short?, do: &keyword_to_algebra/2, else: &map_item_to_algebra/2
  end
  # Central diff-marking hook: if `quoted` carries diff metadata, render it
  # with an identity wrapper and then wrap the whole doc with `wrapper`
  # (so nested marks are not double-wrapped); otherwise pass `wrapper` down.
  defp wrap_on_diff(quoted, fun, wrapper) do
    case extract_diff_meta(quoted) do
      {expr, true} -> fun.(expr, & &1) |> wrapper.()
      {expr, false} -> fun.(expr, wrapper)
    end
  end
  # Diff helpers
  # Escapes a term into quoted AST suitable for diffing. Clause order is
  # significant throughout:
  # Proper list cell (`tail` is a list): escape head and recurse.
  defp escape([head | tail]) when is_list(tail) do
    [escape(head) | escape(tail)]
  end
  # Improper list cell: represent the cons explicitly with a `|` AST node.
  defp escape([head | tail]) do
    [{:|, [], [escape(head), escape(tail)]}]
  end
  # 2-tuples are valid literals in quoted form; escape elements in place.
  defp escape({a, b}) do
    {escape(a), escape(b)}
  end
  # Other tuples must be rebuilt as `{:{}, [], elements}` AST.
  defp escape(tuple) when is_tuple(tuple) do
    list = Tuple.to_list(tuple)
    {:{}, [], escape(list)}
  end
  # Structs must match before plain maps (a struct is also a map). The
  # `__struct__` key stays in the pair list so the struct name survives.
  defp escape(%_{} = struct) do
    {:%{}, [], Map.to_list(struct) |> escape()}
  end
  defp escape(%{} = map) do
    list = Map.to_list(map)
    {:%{}, [], escape(list)}
  end
  # Atoms, numbers, binaries, etc. are already valid quoted literals.
  defp escape(other) do
    other
  end
  # Combines two diffs: `fun` merges the left/right sides, and the result is
  # equivalent only when both inputs were equivalent.
  defp merge_diff(%__MODULE__{} = result1, %__MODULE__{} = result2, fun) do
    {left, right} = fun.(result1.left, result2.left, result1.right, result2.right)
    %__MODULE__{
      equivalent?: result1.equivalent? && result2.equivalent?,
      left: left,
      right: right
    }
  end
  # Prepends `head_result`'s sides onto `tail_result`'s sides.
  defp prepend_diff(%__MODULE__{} = head_result, %__MODULE__{} = tail_result) do
    merge_diff(head_result, tail_result, fn pattern1, pattern2, value1, value2 ->
      {prepend_result(pattern1, pattern2), prepend_result(value1, value2)}
    end)
  end
  # Pops the first element off both sides of a diff, returning
  # `{popped_diff_or_nil, remaining_diff}`. A side may be exhausted (`:empty`)
  # while the other still has elements, in which case the missing side is nil.
  defp pop_diff(%__MODULE__{} = diff) do
    {popped_left, remaining_left} = pop_result(diff.left)
    {popped_right, remaining_right} = pop_result(diff.right)
    remaining_diff = %{diff | left: remaining_left, right: remaining_right}
    case {popped_left, popped_right} do
      {:empty, :empty} ->
        {nil, remaining_diff}
      {{:element, elem1}, :empty} ->
        {%{diff | left: elem1, right: nil}, remaining_diff}
      {:empty, {:element, elem2}} ->
        {%{diff | left: nil, right: elem2}, remaining_diff}
      {{:element, elem1}, {:element, elem2}} ->
        {%{diff | left: elem1, right: elem2}, remaining_diff}
    end
  end
  # Prepends an item to a result side. A nil item (absent side) is a no-op.
  defp prepend_result(nil, quoted) do
    quoted
  end
  # Otherwise unwrap any diff metadata on the target, prepend, and rewrap so
  # the target's marking is preserved.
  defp prepend_result(item, quoted) do
    {extracted, diff_meta?} = extract_diff_meta(quoted)
    safe_prepend_result(item, extracted) |> update_diff_meta(diff_meta?)
  end
  # Prepend into either an AST node's argument list or a plain list.
  defp safe_prepend_result(item, {type, meta, list}), do: {type, meta, [item | list]}
  defp safe_prepend_result(item, list), do: [item | list]
  # Pops the first element from a result side, ignoring (and dropping) any
  # diff metadata wrapper. Returns `{:empty, _}` or `{{:element, e}, rest}`.
  defp pop_result(quoted) do
    {extracted, _diff_meta?} = extract_diff_meta(quoted)
    safe_pop_result(extracted)
  end
  defp safe_pop_result({_, _, []} = quoted) do
    {:empty, quoted}
  end
  defp safe_pop_result({left, meta, [head | tail]}) do
    {{:element, head}, {left, meta, tail}}
  end
  # Sets or clears the `:diff` flag on a quoted node. AST nodes carry the
  # flag in their metadata keyword list; plain literals have no metadata, so
  # marking one wraps it in a `{:__block__, [diff: true], [literal]}` node.
  defp update_diff_meta({left, meta, right}, false),
    do: {left, Keyword.delete(meta, :diff), right}
  defp update_diff_meta({left, meta, right}, true),
    do: {left, [{:diff, true} | Keyword.delete(meta, :diff)], right}
  defp update_diff_meta(literal, false),
    do: literal
  defp update_diff_meta(literal, true),
    do: {:__block__, [diff: true], [literal]}
  # Inverse of the above: returns `{unwrapped, marked?}`. The literal-wrapper
  # clause must come before the generic 3-tuple clause.
  defp extract_diff_meta({:__block__, [diff: true], [literal]}), do: {literal, true}
  defp extract_diff_meta({left, meta, right}), do: {{left, meta, right}, !!meta[:diff]}
  defp extract_diff_meta(other), do: {other, false}
defp keyword?(quoted) do
{pair, _} = extract_diff_meta(quoted)
safe_keyword?(pair)
end
defp safe_keyword?({key, _value}), do: key_is_atom?(key)
defp safe_keyword?({:{}, _meta, [key, _value]}), do: key_is_atom?(key)
defp safe_keyword?(_other), do: false
def key_is_atom?(quoted) do
{key, _} = extract_diff_meta(quoted)
:erlang.is_atom(key)
end
end
| 30.9181 | 99 | 0.6643 |
03cfe60b12cb3caf78455f2cedcab740ff8a9eac | 2,208 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/output_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/output_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/output_config.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.OutputConfig do
  @moduledoc """
  The desired output location and metadata.

  ## Attributes

  *   `batchSize` (*type:* `integer()`, *default:* `nil`) - The max number of response protos to put into each output JSON file on
      Google Cloud Storage.
      The valid range is [1, 100]. If not specified, the default value is 20.

      For example, for one pdf file with 100 pages, 100 response protos will
      be generated. If `batch_size` = 20, then 5 json files each
      containing 20 response protos will be written under the prefix
      `gcs_destination`.`uri`.

      Currently, batch_size only applies to GcsDestination, with potential future
      support for other output configurations.
  *   `gcsDestination` (*type:* `GoogleApi.Vision.V1.Model.GcsDestination.t`, *default:* `nil`) - The Google Cloud Storage location to write the output(s) to.
  """

  # Generated model: `use`/`field` come from the Gax model DSL, which defines
  # the struct, typespec plumbing, and decode/encode support.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :batchSize => integer(),
          :gcsDestination => GoogleApi.Vision.V1.Model.GcsDestination.t()
        }

  # Scalar field; decoded as-is from JSON.
  field(:batchSize)
  # Nested message field; decoded into the GcsDestination model.
  field(:gcsDestination, as: GoogleApi.Vision.V1.Model.GcsDestination)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.OutputConfig do
  # Decoding is delegated to the generated `decode/2` on the model module.
  def decode(value, options), do: GoogleApi.Vision.V1.Model.OutputConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.OutputConfig do
  # All generated models share the Gax base encoder.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 36.8 | 158 | 0.727808 |
03cff7b55f50d28df8bd522fd6d0278ee37a6f72 | 818 | ex | Elixir | elixir_rakv/lib/ra_kv.ex | OCLC-Research/ra-examples | cf7bb8813562e35c4bd0d6e1032f127f72f376de | [
"Apache-2.0"
] | null | null | null | elixir_rakv/lib/ra_kv.ex | OCLC-Research/ra-examples | cf7bb8813562e35c4bd0d6e1032f127f72f376de | [
"Apache-2.0"
] | null | null | null | elixir_rakv/lib/ra_kv.ex | OCLC-Research/ra-examples | cf7bb8813562e35c4bd0d6e1032f127f72f376de | [
"Apache-2.0"
] | null | null | null | defmodule RaKv do
@moduledoc """
Documentation for RaKv.
"""
def start(_args) do
# the initial cluster members
members = Enum.map([:a@localhost, :b@localhost, :c@localhost], fn node -> { :rakv, node } end)
# an arbitrary cluster name
clusterName = <<"ra_kv">>
# the config passed to `init/1`, must be a `map`
config = %{}
# the machine configuration
machine = {:module, RaKv.Machine, config}
# ensure ra is started
Application.ensure_all_started(:ra)
# start a cluster instance running the `ra_kv` machine
:ra.start_cluster(clusterName, machine, members)
end
## Client API
def put(serverid, key, value) do
:ra.process_command(serverid, {:put, key, value})
end
def get(serverid, key) do
:ra.process_command(serverid, {:get, key})
end
end
| 25.5625 | 98 | 0.654034 |
03cffbaeb6b54c1b399a360e093af5adf6fcd7b1 | 2,794 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta2_document_page_line.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta2_document_page_line.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/document_ai/lib/google_api/document_ai/v1/model/google_cloud_documentai_v1beta2_document_page_line.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLine do
  @moduledoc """
  A collection of tokens that a human would perceive as a line. Does not cross column boundaries, can be horizontal, vertical, etc.

  ## Attributes

  *   `detectedLanguages` (*type:* `list(GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageDetectedLanguage.t)`, *default:* `nil`) - A list of detected languages together with confidence.
  *   `layout` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLayout.t`, *default:* `nil`) - Layout for Line.
  *   `provenance` (*type:* `GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentProvenance.t`, *default:* `nil`) - The history of this annotation.
  """

  # Generated model: `use`/`field` come from the Gax model DSL, which defines
  # the struct and its decode/encode support.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :detectedLanguages =>
            list(
              GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageDetectedLanguage.t()
            )
            | nil,
          :layout =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLayout.t() | nil,
          :provenance =>
            GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentProvenance.t() | nil
        }

  # Repeated nested message field.
  field(:detectedLanguages,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageDetectedLanguage,
    type: :list
  )

  # Singular nested message fields.
  field(:layout, as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLayout)

  field(:provenance,
    as: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentProvenance
  )
end
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLine do
  # Shorten the (very long) generated model name locally, then delegate
  # decoding to its generated `decode/2`.
  alias GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLine, as: Model

  def decode(value, options), do: Model.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1.Model.GoogleCloudDocumentaiV1beta2DocumentPageLine do
  # All generated models share the Gax base encoder.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 39.352113 | 207 | 0.750537 |
03d000c25159c28be9475562f5e59d5a74f76eec | 3,087 | exs | Elixir | test/islands_engine/board_test.exs | msramos/islands_engine | 6171d24aa937f80878251a0a3c575ac7023d5e8e | [
"MIT"
] | null | null | null | test/islands_engine/board_test.exs | msramos/islands_engine | 6171d24aa937f80878251a0a3c575ac7023d5e8e | [
"MIT"
] | null | null | null | test/islands_engine/board_test.exs | msramos/islands_engine | 6171d24aa937f80878251a0a3c575ac7023d5e8e | [
"MIT"
] | null | null | null | defmodule IslandsEngine.BoardTest do
use ExUnit.Case, async: true
alias IslandsEngine.{Board, Island, Coordinate}
doctest Board
@sut Board
describe "new/0" do
test "creates a new empty board" do
expected = %{}
result = @sut.new()
assert result == expected
end
end
describe "position_island/3" do
test "adds an island and return the updated board in an ok tuple" do
board = %{}
island = island(:square, 3, 3)
expected =
{:ok,
%{
square: island
}}
result = @sut.position_island(board, :square, island)
assert result == expected
end
test "returns an error if the island overlaps with an existing island in the board" do
board = %{
square: island(:square, 4, 4)
}
island = island(:s_shape, 5, 2)
expected = {:error, :overlapping_island}
result = @sut.position_island(board, :s_shape, island)
assert result == expected
end
end
describe "all_islands_positioned?/1" do
test "returns true if all types of islands exist on the board" do
board = Island.types() |> Map.new(fn type -> {type, :whatever} end)
result = @sut.all_islands_positioned?(board)
assert result == true
end
test "returns false if the board does not contains all island types" do
board = %{square: island(:square, 1, 1), atoll: island(:atoll, 4, 4)}
result = @sut.all_islands_positioned?(board)
assert result == false
end
end
describe "guess/2" do
test "returns a hit tuple when the coordinate hits an island but no player won" do
board = %{square: island(:square, 1, 1)}
guess = coord(2, 2)
expected = {
:hit,
:none,
:no_win,
put_in(board.square.hit_coordinates, coords([{2, 2}]))
}
result = @sut.guess(board, guess)
assert result == expected
end
test "returns a miss tuple when the coordinate does not hit any island" do
board = %{square: island(:square, 1, 1)}
guess = coord(3, 2)
expected = {
:miss,
:none,
:no_win,
board
}
result = @sut.guess(board, guess)
assert result == expected
end
test "returns a hit tuple with a win value when the guess hits the last coordinate" do
board = %{square: island(:square, 1, 1)}
board = put_in(board.square.hit_coordinates, coords([{1, 1}, {1, 2}, {2, 1}]))
guess = coord(2, 2)
expected = {
:hit,
:square,
:win,
put_in(board.square.hit_coordinates, board.square.coordinates)
}
result = @sut.guess(board, guess)
assert result == expected
end
end
defp coords(coord_list) do
coord_list
|> Enum.map(fn {r, c} -> coord(r, c) end)
|> MapSet.new()
end
defp coord(row, col) do
{:ok, coord} = Coordinate.new(row, col)
coord
end
defp island(type, row, col) do
{:ok, coord} = Coordinate.new(row, col)
{:ok, island} = Island.new(type, coord)
island
end
end
| 22.698529 | 90 | 0.590217 |
03d008431d428d73ce887fb51684af9457d11cef | 21,054 | ex | Elixir | lib/ecto/migration.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration.ex | victorsolis/ecto | 6c0dbf1ee2afd9b5bdf1f3feee8d361c8197c99a | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Migration do
@moduledoc """
Migrations are used to modify your database schema over time.
This module provides many helpers for migrating the database,
allowing developers to use Elixir to alter their storage in
a way that is database independent.
Here is an example:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def up do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
def down do
drop table(:weather)
end
end
Note migrations have an `up/0` and `down/0` instructions, where
`up/0` is used to update your database and `down/0` rolls back
the prompted changes.
Ecto provides some mix tasks to help developers work with migrations:
* `mix ecto.gen.migration add_weather_table` - generates a
migration that the user can fill in with particular commands
* `mix ecto.migrate` - migrates a repository
* `mix ecto.rollback` - rolls back a particular migration
Run the `mix help COMMAND` for more information.
## Change
Migrations can also be automatically reversible by implementing
`change/0` instead of `up/0` and `down/0`. For example, the
migration above can be written as:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def change do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
end
Notice not all commands are reversible though. Trying to rollback
a non-reversible command will raise an `Ecto.MigrationError`.
## Prefixes
Migrations support specifying a table prefix or index prefix which will target either a schema
if using Postgres, or a different database if using MySQL. If no prefix is
provided, the default schema or database is used.
  Any reference declared in a table migration refers, by default, to a table with the same prefix as the one declared for the table.
The prefix is specified in the table options:
def up do
create table(:weather, prefix: :north_america) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
add :group_id, references(:groups)
timestamps
end
create index(:weather, [:city], prefix: :north_america)
end
Note: if using MySQL with a prefixed table, you must use the same prefix for the references since
cross database references are not supported.
For both MySQL and Postgres with a prefixed table, you must use the same prefix for the index field to ensure
you index the prefix qualified table.
## Transactions
By default, Ecto runs all migrations inside a transaction. That's not always
ideal: for example, PostgreSQL allows to create/drop indexes concurrently but
only outside of any transaction (see the [PostgreSQL
docs](http://www.postgresql.org/docs/9.2/static/sql-createindex.html#SQL-CREATEINDEX-CONCURRENTLY)).
Migrations can be forced to run outside a transaction by setting the
`@disable_ddl_transaction` module attribute to `true`:
defmodule MyRepo.Migrations.CreateIndexes do
use Ecto.Migration
@disable_ddl_transaction true
def change do
create index(:posts, [:slug], concurrently: true)
end
end
Since running migrations outside a transaction can be dangerous, consider
performing very few operations in such migrations.
See the `index/3` function for more information on creating/dropping indexes
concurrently.
## Schema Migrations table
Version numbers of migrations will be saved in `schema_migrations` table.
But you can configure the table via:
config :app, App.Repo, migration_table: "my_migrations"
"""
  defmodule Index do
    @moduledoc """
    Defines an index struct used in migrations.
    """
    # `columns` entries may be atoms (column names) or strings (raw SQL
    # expressions); `where` enables partial indexes; `using` picks the
    # index type. See `Ecto.Migration.index/3`.
    defstruct table: nil,
              prefix: nil,
              name: nil,
              columns: [],
              unique: false,
              concurrently: false,
              using: nil,
              where: nil
    @type t :: %__MODULE__{
      table: atom,
      prefix: atom,
      name: atom,
      columns: [atom | String.t],
      unique: boolean,
      concurrently: boolean,
      using: atom | String.t,
      where: atom | String.t
    }
  end
  defmodule Table do
    @moduledoc """
    Defines a table struct used in migrations.
    """
    # `prefix` targets a schema (Postgres) or database (MySQL); `engine` and
    # `options` customize storage. See `Ecto.Migration.table/2`.
    defstruct name: nil, prefix: nil, primary_key: true, engine: nil, options: nil
    @type t :: %__MODULE__{name: atom, prefix: atom | nil, primary_key: boolean,
                           engine: atom, options: String.t}
  end
defmodule Reference do
@moduledoc """
Defines a reference struct used in migrations.
"""
defstruct name: nil, table: nil, column: :id, type: :serial, on_delete: :nothing
@type t :: %__MODULE__{table: atom, column: atom, type: atom, on_delete: atom}
end
  defmodule Constraint do
    @moduledoc """
    Defines a Constraint struct used in migrations.
    """
    # `check` holds a CHECK expression; `exclude` an EXCLUDE expression
    # (exactly one is normally set).
    defstruct name: nil, table: nil, check: nil, exclude: nil, prefix: nil
    @type t :: %__MODULE__{name: atom, table: atom, prefix: atom | nil,
                           check: String.t | nil, exclude: String.t | nil}
  end
alias Ecto.Migration.Runner
  @doc false
  # Imports the migration DSL into the using module and sets up the
  # `@disable_ddl_transaction` attribute (overridable by the user).
  defmacro __using__(_) do
    quote location: :keep do
      import Ecto.Migration
      @disable_ddl_transaction false
      @before_compile Ecto.Migration
    end
  end
  @doc false
  # Exposes the migration options (currently just the DDL-transaction flag)
  # to the migrator via a generated `__migration__/0`.
  defmacro __before_compile__(_env) do
    quote do
      def __migration__,
        do: [disable_ddl_transaction: @disable_ddl_transaction]
    end
  end
@doc """
Creates a table.
By default, the table will also include a primary_key of name `:id`
and type `:serial`. Check `table/2` docs for more information.
## Examples
create table(:posts) do
add :title, :string, default: "Untitled"
add :body, :text
timestamps
end
"""
defmacro create(object, do: block) do
do_create(object, :create, block)
end
@doc """
Creates a table if it does not exist.
Works just like `create/2` but does not raise an error when table
already exists.
"""
defmacro create_if_not_exists(object, do: block) do
do_create(object, :create_if_not_exists, block)
end
defp do_create(object, command, block) do
quote do
table = %Table{} = unquote(object)
Runner.start_command({unquote(command), Ecto.Migration.__prefix__(table)})
if table.primary_key do
add(:id, :serial, primary_key: true)
end
unquote(block)
Runner.end_command
table
end
end
@doc """
Alters a table.
## Examples
alter table(:posts) do
add :summary, :text
modify :title, :text
remove :views
end
"""
defmacro alter(object, do: block) do
quote do
table = %Table{} = unquote(object)
Runner.start_command({:alter, Ecto.Migration.__prefix__(table)})
unquote(block)
Runner.end_command
end
end
@doc """
Creates one of the following:
* an index
* a table with only an `:id` field
* a constraint
When reversing (in `change` running backward) indexes are only dropped if they
exist and no errors are raised. To enforce dropping an index use `drop/1`.
## Examples
create index(:posts, [:name])
create table(:version)
create constraint(:products, "price_must_be_positive", check: "price > 0")
"""
def create(%Index{} = index) do
Runner.execute {:create, __prefix__(index)}
index
end
def create(%Constraint{} = constraint) do
Runner.execute {:create, __prefix__(constraint)}
constraint
end
def create(%Table{} = table) do
do_create table, :create
table
end
@doc """
Creates an index or a table with only `:id` field if one does not yet exist.
## Examples
create_if_not_exists index(:posts, [:name])
create_if_not_exists table(:version)
"""
def create_if_not_exists(%Index{} = index) do
Runner.execute {:create_if_not_exists, __prefix__(index)}
end
def create_if_not_exists(%Table{} = table) do
do_create table, :create_if_not_exists
end
defp do_create(table, command) do
columns =
if table.primary_key do
[{:add, :id, :serial, primary_key: true}]
else
[]
end
Runner.execute {command, __prefix__(table), columns}
end
@doc """
Drops one of the following:
* an index
* a table
* a constraint
## Examples
drop index(:posts, [:name])
drop table(:posts)
drop constraint(:products, name: "price_must_be_positive")
"""
def drop(%{} = index_or_table_or_constraint) do
Runner.execute {:drop, __prefix__(index_or_table_or_constraint)}
index_or_table_or_constraint
end
@doc """
Drops a table or index if it exists.
Does not raise an error if table or index does not exist.
## Examples
drop_if_exists index(:posts, [:name])
drop_if_exists table(:posts)
"""
def drop_if_exists(%{} = index_or_table) do
Runner.execute {:drop_if_exists, __prefix__(index_or_table)}
index_or_table
end
@doc """
Returns a table struct that can be given on create, alter, etc.
## Examples
create table(:products) do
add :name, :string
add :price, :decimal
end
drop table(:products)
create table(:products, primary_key: false) do
add :name, :string
add :price, :decimal
end
## Options
* `:primary_key` - when false, does not generate primary key on table creation
* `:engine` - customizes the table storage for supported databases. For MySQL,
the default is InnoDB
* `:options` - provide custom options that will be appended after generated
statement, for example "WITH", "INHERITS" or "ON COMMIT" clauses
"""
def table(name, opts \\ []) when is_atom(name) do
struct(%Table{name: name}, opts)
end
@doc ~S"""
Returns an index struct that can be used on `create`, `drop`, etc.
Expects the table name as first argument and the index fields as
second. The field can be an atom, representing a column, or a
string representing an expression that is sent as is to the database.
Indexes are non-unique by default.
## Options
* `:name` - the name of the index. Defaults to "#{table}_#{column}_index"
* `:unique` - if the column(s) is unique or not
* `:concurrently` - if the index should be created/dropped concurrently
* `:using` - configures the index type
* `:prefix` - prefix for the index
* `:where` - the conditions for a partial index
## Adding/dropping indexes concurrently
PostgreSQL supports adding/dropping indexes concurrently (see the
[docs](http://www.postgresql.org/docs/9.4/static/sql-createindex.html)).
In order to take advantage of this, the `:concurrently` option needs to be set
to `true` when the index is created/dropped.
**Note**: in order for the `:concurrently` option to work, the migration must
not be run inside a transaction. See the `Ecto.Migration` docs for more
information on running migrations outside of a transaction.
## Index types
PostgreSQL supports several index types like B-tree, Hash or GiST. When
creating an index, the index type defaults to B-tree, but it can be specified
with the `:using` option. The `:using` option can be an atom or a string; its
value is passed to the `USING` clause as is.
More information on index types can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-types.html).
## Partial indexes
Databases like PostgreSQL and MSSQL supports partial indexes.
A partial index is an index built over a subset of a table. The subset
is defined by a conditional expression using the `:where` option.
The `:where` option can be an atom or a string; its value is passed
to the `WHERE` clause as is.
More information on partial indexes can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-partial.html).
## Examples
# Without a name, index defaults to products_category_id_sku_index
create index(:products, [:category_id, :sku], unique: true)
# Name can be given explicitly though
drop index(:products, [:category_id, :sku], name: :my_special_name)
# Indexes can be added concurrently
create index(:products, [:category_id, :sku], concurrently: true)
# The index type can be specified
create index(:products, [:name], using: :hash)
# Create an index on custom expressions
create index(:products, ["lower(name)"], name: :products_lower_name_index)
# Create a partial index
create index(:products, [:user_id], where: "price = 0", name: :free_products_index)
"""
def index(table, columns, opts \\ []) when is_atom(table) and is_list(columns) do
index = struct(%Index{table: table, columns: columns}, opts)
%{index | name: index.name || default_index_name(index)}
end
@doc """
Shortcut for creating a unique index.
See `index/3` for more information.
"""
def unique_index(table, columns, opts \\ []) when is_atom(table) and is_list(columns) do
index(table, columns, [unique: true] ++ opts)
end
  # Builds the default index name "#{table}_#{columns...}_index", replacing
  # any non-word characters (e.g. from expression columns) with underscores
  # and collapsing doubled underscores.
  defp default_index_name(index) do
    [index.table, index.columns, "index"]
    |> List.flatten
    |> Enum.join("_")
    |> String.replace(~r"[^\w_]", "_")
    |> String.replace("__", "_")
    |> String.to_atom
  end
@doc """
Executes arbitrary SQL or a keyword command in NoSQL databases.
## Examples
execute "UPDATE posts SET published_at = NULL"
execute create: "posts", capped: true, size: 1024
"""
def execute(command) when is_binary(command) or is_list(command) do
Runner.execute command
end
@doc """
Gets the migrator direction.
"""
@spec direction :: :up | :down
def direction do
Runner.migrator_direction
end
@doc """
Adds a column when creating or altering a table.

This function accepts Ecto primitive types as column types, which are
normalized by the database adapter. For example, `string` is converted
to varchar, `datetime` to the underlying datetime or timestamp type,
`binary` to bits or blob, and so on.

However, the column type is not always the same as the type in your
model. For example, a model that has a `string` field can be backed by
columns of type `char`, `varchar`, `text` and others. For this reason,
this function also accepts `text` and other column types, which are
sent as is to the underlying database.

To sum up, the column type may be either an Ecto primitive type, which
is normalized when the database does not understand it, like `string`
or `binary`, or a database type which is passed as is. Custom Ecto
types, like `Ecto.Datetime`, are not supported because they are an
application level concern and may not always map to the database.

## Examples

    create table(:posts) do
      add :title, :string, default: "Untitled"
    end

    alter table(:posts) do
      add :summary, :text # Database type
      add :object, :json
    end

## Options

  * `:primary_key` - when true, marks this field as the primary key
  * `:default` - the column's default value. Can be a string, number
    or a fragment generated by `fragment/1`
  * `:null` - when `false`, the column does not allow null values
  * `:size` - the size of the type (for example the number of characters).
    Default is no size, except for `:string` which defaults to 255.
  * `:precision` - the precision for a numeric type. Default is no precision
  * `:scale` - the scale of a numeric type. Default is 0 scale
"""
def add(column, type, opts \\ []) when is_atom(column) do
  # Reject module-based "types" early with a clear error.
  validate_type!(type)
  Runner.subcommand({:add, column, type, opts})
end
@doc """
Renames a table.

## Examples

    rename table(:posts), to: table(:new_posts)

"""
def rename(%Table{} = old_table, to: %Table{} = new_table) do
  # Both sides must carry the resolved prefix so the adapter renames
  # the table inside the correct schema/database.
  Runner.execute({:rename, __prefix__(old_table), __prefix__(new_table)})
  new_table
end
@doc """
Renames a column outside of the `alter` statement.

## Examples

    rename table(:posts), :title, to: :summary

"""
def rename(%Table{} = table, current_column, to: new_column)
    when is_atom(current_column) and is_atom(new_column) do
  Runner.execute({:rename, __prefix__(table), current_column, new_column})
  # Return the table so calls can be chained.
  table
end
@doc """
Generates a fragment to be used as a default value.

## Examples

    create table(:posts) do
      add :inserted_at, :datetime, default: fragment("now()")
    end

"""
def fragment(expr) when is_binary(expr), do: {:fragment, expr}
@doc """
Adds `:inserted_at` and `:updated_at` timestamps columns.

Those columns are of `:datetime` type and by default cannot be null.
`opts` can be given to customize the generated fields.
"""
def timestamps(opts \\ []) do
  # Timestamps are NOT NULL unless the caller explicitly overrides :null.
  defaults = Keyword.put_new(opts, :null, false)
  add(:inserted_at, :datetime, defaults)
  add(:updated_at, :datetime, defaults)
end
@doc """
Modifies the type of a column when altering a table.

See `add/3` for more information on supported types.

## Examples

    alter table(:posts) do
      modify :title, :text
    end

## Options

  * `:null` - sets to null or not null
  * `:default` - changes the default
  * `:size` - the size of the type (for example the number of characters). Default is no size.
  * `:precision` - the precision for a numeric type. Default is no precision.
  * `:scale` - the scale of a numeric type. Default is 0 scale.
"""
def modify(column, type, opts \\ []) when is_atom(column) do
  # Consistency fix: validate the type just like add/3 does, so
  # module-based "types" raise a clear ArgumentError here instead of
  # producing a confusing failure in the adapter.
  validate_type!(type)
  Runner.subcommand {:modify, column, type, opts}
end
@doc """
Removes a column when altering a table.

## Examples

    alter table(:posts) do
      remove :title
    end

"""
def remove(column) when is_atom(column), do: Runner.subcommand({:remove, column})
@doc ~S"""
Defines a foreign key.

## Examples

    create table(:products) do
      add :group_id, references(:groups)
    end

## Options

  * `:name` - The name of the underlying reference,
    defaults to "#{table}_#{column}_fkey"
  * `:column` - The foreign key column, default is `:id`
  * `:type` - The foreign key type, default is `:serial`
  * `:on_delete` - What to perform if the entry is deleted.
    May be `:nothing`, `:delete_all` or `:nilify_all`.
    Defaults to `:nothing`.
"""
def references(table, opts \\ []) when is_atom(table) do
  reference = struct(%Reference{table: table}, opts)
  # Fail fast on unsupported :on_delete strategies.
  if reference.on_delete in [:nothing, :delete_all, :nilify_all] do
    reference
  else
    raise ArgumentError, "unknown :on_delete value: #{inspect reference.on_delete}"
  end
end
@doc ~S"""
Defines a constraint (either a check constraint or an exclude
constraint) to be evaluated by the database when a row is inserted
or updated.

## Examples

    create constraint(:users, :price_must_be_positive, check: "price > 0")
    create constraint(:size_ranges, :no_overlap, exclude: ~s|gist (int4range("from", "to", '[]') WITH &&)|)
    drop   constraint(:products, "price_must_be_positive")

## Options

  * `:check` - The expression to evaluate on a row. Required when creating.
  * `:name` - The name of the constraint - required.
"""
def constraint(table, name, opts \\ []) do
  %Constraint{table: table, name: name}
  |> struct(opts)
end
@doc """
Executes queued migration commands.

Reverses the order in which commands are executed when doing a rollback
of a `change/0` function and resets the command queue.
"""
def flush do
  Runner.flush()
end
# A bare atom is a valid type unless it is an aliased module
# (its string form starts with "Elixir."), which users sometimes pass
# by mistake instead of a primitive like :string.
defp validate_type!(type) when is_atom(type) do
  if type |> Atom.to_string() |> String.starts_with?("Elixir.") do
    raise ArgumentError,
      "#{inspect type} is not a valid database type, " <>
      "please use an atom like :string, :text and so on"
  else
    :ok
  end
end

# Composite types such as {:array, :string}: only the subtype needs checking.
defp validate_type!({type, subtype}) when is_atom(type) and is_atom(subtype) do
  validate_type!(subtype)
end

# References are always valid column types.
defp validate_type!(%Reference{} = reference), do: reference
@doc false
# Resolves the effective prefix of a table/index: falls back to the
# migrator prefix when none is set, keeps the struct when they agree,
# and raises when the two prefixes conflict.
def __prefix__(%{prefix: prefix} = index_or_table) do
  runner_prefix = Runner.prefix()
  cond do
    is_nil(prefix) ->
      %{index_or_table | prefix: runner_prefix}
    is_nil(runner_prefix) or runner_prefix == prefix ->
      index_or_table
    true ->
      # BUG FIX: the message previously read "does match", inverting
      # the meaning of the error being reported.
      raise Ecto.MigrationError, message:
        "the :prefix option `#{inspect prefix}` does not match the migrator prefix `#{inspect runner_prefix}`"
  end
end
end
| 28.567164 | 142 | 0.66111 |
03d00c5acbd6948efba87a6c6cc2984d955b5a67 | 1,155 | exs | Elixir | config/config.exs | michaelkpfeifer/effective-interest-rate-elixir | 4a65059c95cc0fb6c48b1344be78c23dc5395082 | [
"MIT"
] | null | null | null | config/config.exs | michaelkpfeifer/effective-interest-rate-elixir | 4a65059c95cc0fb6c48b1344be78c23dc5395082 | [
"MIT"
] | null | null | null | config/config.exs | michaelkpfeifer/effective-interest-rate-elixir | 4a65059c95cc0fb6c48b1344be78c23dc5395082 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :effective_interest_rate, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:effective_interest_rate, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 37.258065 | 73 | 0.758442 |
03d02cbb0355ab514c884c2813f627c20b825d2b | 502 | ex | Elixir | apps/theta/lib/theta/account/user.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | apps/theta/lib/theta/account/user.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | 11 | 2020-07-21T09:34:54.000Z | 2021-08-29T07:38:02.000Z | apps/theta/lib/theta/account/user.ex | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | defmodule Theta.Account.User do
use Ecto.Schema
import Ecto.Changeset
alias Theta.Account.Credential
schema "user" do
field :name, :string
field :username, :string
field :role, :string, default: 'USER'
field :avatar, :string
has_one :credential, Credential
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:name, :username, :role])
|> validate_required([:name, :username])
|> unique_constraint(:username)
end
end
| 20.916667 | 45 | 0.667331 |
03d0972b9c025bf7ee119eb879399c762deb650f | 39,587 | ex | Elixir | lib/ecto/repo.ex | darkbaby123/ecto | ed861cbbfae1907463304597ee44d0951f9d0946 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | darkbaby123/ecto | ed861cbbfae1907463304597ee44d0951f9d0946 | [
"Apache-2.0"
] | null | null | null | lib/ecto/repo.ex | darkbaby123/ecto | ed861cbbfae1907463304597ee44d0951f9d0946 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Repo do
@moduledoc """
Defines a repository.
A repository maps to an underlying data store, controlled by the
adapter. For example, Ecto ships with a Postgres adapter that
stores data into a PostgreSQL database.
When used, the repository expects the `:otp_app` as option.
The `:otp_app` should point to an OTP application that has
the repository configuration. For example, the repository:
defmodule Repo do
use Ecto.Repo, otp_app: :my_app
end
Could be configured with:
config :my_app, Repo,
adapter: Ecto.Adapters.Postgres,
database: "ecto_simple",
username: "postgres",
password: "postgres",
hostname: "localhost"
Most of the configuration that goes into the `config` is specific
to the adapter, so check `Ecto.Adapters.Postgres` documentation
for more information. However, some configuration is shared across
all adapters, they are:
* `:adapter` - a compile-time option that specifies the adapter itself.
As a compile-time option, it may also be given as an option to `use Ecto.Repo`.
* `:name` - the name of the Repo supervisor process
* `:priv` - the directory where to keep repository data, like
migrations, schema and more. Defaults to "priv/YOUR_REPO".
It must always point to a subdirectory inside the priv directory.
* `:url` - an URL that specifies storage information. Read below
for more information
* `:loggers` - a list of `{mod, fun, args}` tuples that are
invoked by adapters for logging queries and other events.
The given module and function will be called with a log
entry (see `Ecto.LogEntry`) and the given arguments. The
invoked function must return the `Ecto.LogEntry` as result.
The default value is: `[{Ecto.LogEntry, :log, []}]`, which
will call `Ecto.LogEntry.log/1` that will use Elixir's `Logger`
in `:debug` mode. You may pass any desired mod-fun-args
triplet or `[{Ecto.LogEntry, :log, [:info]}]` if you want to
keep the current behaviour but use another log level.
This option is processed at compile-time and may also be given
as an option to `use Ecto.Repo`.
## URLs
Repositories by default support URLs. For example, the configuration
above could be rewritten to:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple"
The schema can be of any value. The path represents the database name
while options are simply merged in.
URL can include query parameters to override shared and adapter-specific
options `ssl`, `timeout`, `pool_timeout`, `pool_size`:
config :my_app, Repo,
url: "ecto://postgres:postgres@localhost/ecto_simple?ssl=true&pool_size=10"
In case the URL needs to be dynamically configured, for example by
reading a system environment variable, such can be done via the
`c:init/2` repository callback:
def init(_type, config) do
{:ok, Keyword.put(config, :url, System.get_env("DATABASE_URL"))}
end
## Shared options
Almost all of the repository operations below accept the following
options:
* `:timeout` - The time in milliseconds to wait for the query call to
finish, `:infinity` will wait indefinitely (default: 15000);
* `:pool_timeout` - The time in milliseconds to wait for calls to the pool
to finish, `:infinity` will wait indefinitely (default: 5000);
* `:log` - When false, does not log the query
Such cases will be explicitly documented as well as any extra option.
"""
@type t :: module
@doc false
# Injects the repository API into the module calling `use Ecto.Repo`.
# All functions below are defined inside the caller's module via `quote`.
defmacro __using__(opts) do
  quote bind_quoted: [opts: opts] do
    @behaviour Ecto.Repo

    # Resolve the :otp_app, the adapter module and the compile-time
    # configuration for this repo.
    {otp_app, adapter, config} = Ecto.Repo.Supervisor.compile_config(__MODULE__, opts)
    @otp_app otp_app
    @adapter adapter
    @config config
    @before_compile adapter

    # Build, at compile time, the AST that threads a log entry through
    # every configured logger. Each logger is either a module (its
    # `log/1` is called) or an `{mod, fun, args}` triplet.
    loggers =
      Enum.reduce(opts[:loggers] || config[:loggers] || [Ecto.LogEntry], quote(do: entry), fn
        mod, acc when is_atom(mod) ->
          quote do: unquote(mod).log(unquote(acc))

        # Reject unsupported log levels for the built-in logger.
        {Ecto.LogEntry, :log, [level]}, _acc when not(level in [:error, :info, :warn, :debug]) ->
          raise ArgumentError, "the log level #{inspect level} is not supported in Ecto.LogEntry"

        {mod, fun, args}, acc ->
          quote do: unquote(mod).unquote(fun)(unquote(acc), unquote_splicing(args))
      end)

    def __adapter__ do
      @adapter
    end

    # Pipes the log entry through the loggers composed above.
    def __log__(entry) do
      unquote(loggers)
    end

    # Reads the runtime configuration without starting the repo.
    def config do
      {:ok, config} = Ecto.Repo.Supervisor.runtime_config(:dry_run, __MODULE__, @otp_app, [])
      config
    end

    def child_spec(opts) do
      %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [opts]},
        type: :supervisor
      }
    end

    def start_link(opts \\ []) do
      Ecto.Repo.Supervisor.start_link(__MODULE__, @otp_app, @adapter, opts)
    end

    def stop(pid, timeout \\ 5000) do
      Supervisor.stop(pid, :normal, timeout)
    end

    # Transaction functions are only defined when the adapter
    # supports them.
    if function_exported?(@adapter, :transaction, 3) do
      def transaction(fun_or_multi, opts \\ []) do
        Ecto.Repo.Queryable.transaction(@adapter, __MODULE__, fun_or_multi, opts)
      end

      def in_transaction? do
        @adapter.in_transaction?(__MODULE__)
      end

      @spec rollback(term) :: no_return
      def rollback(value) do
        @adapter.rollback(__MODULE__, value)
      end
    end

    def all(queryable, opts \\ []) do
      Ecto.Repo.Queryable.all(__MODULE__, @adapter, queryable, opts)
    end

    def stream(queryable, opts \\ []) do
      Ecto.Repo.Queryable.stream(__MODULE__, @adapter, queryable, opts)
    end

    def get(queryable, id, opts \\ []) do
      Ecto.Repo.Queryable.get(__MODULE__, @adapter, queryable, id, opts)
    end

    def get!(queryable, id, opts \\ []) do
      Ecto.Repo.Queryable.get!(__MODULE__, @adapter, queryable, id, opts)
    end

    def get_by(queryable, clauses, opts \\ []) do
      Ecto.Repo.Queryable.get_by(__MODULE__, @adapter, queryable, clauses, opts)
    end

    def get_by!(queryable, clauses, opts \\ []) do
      Ecto.Repo.Queryable.get_by!(__MODULE__, @adapter, queryable, clauses, opts)
    end

    def one(queryable, opts \\ []) do
      Ecto.Repo.Queryable.one(__MODULE__, @adapter, queryable, opts)
    end

    def one!(queryable, opts \\ []) do
      Ecto.Repo.Queryable.one!(__MODULE__, @adapter, queryable, opts)
    end

    def aggregate(queryable, aggregate, field, opts \\ [])
        when aggregate in [:count, :avg, :max, :min, :sum] and is_atom(field) do
      Ecto.Repo.Queryable.aggregate(__MODULE__, @adapter, queryable, aggregate, field, opts)
    end

    def insert_all(schema_or_source, entries, opts \\ []) do
      Ecto.Repo.Schema.insert_all(__MODULE__, @adapter, schema_or_source, entries, opts)
    end

    def update_all(queryable, updates, opts \\ []) do
      Ecto.Repo.Queryable.update_all(__MODULE__, @adapter, queryable, updates, opts)
    end

    def delete_all(queryable, opts \\ []) do
      Ecto.Repo.Queryable.delete_all(__MODULE__, @adapter, queryable, opts)
    end

    def insert(struct, opts \\ []) do
      Ecto.Repo.Schema.insert(__MODULE__, @adapter, struct, opts)
    end

    def update(struct, opts \\ []) do
      Ecto.Repo.Schema.update(__MODULE__, @adapter, struct, opts)
    end

    def insert_or_update(changeset, opts \\ []) do
      Ecto.Repo.Schema.insert_or_update(__MODULE__, @adapter, changeset, opts)
    end

    def delete(struct, opts \\ []) do
      Ecto.Repo.Schema.delete(__MODULE__, @adapter, struct, opts)
    end

    def insert!(struct, opts \\ []) do
      Ecto.Repo.Schema.insert!(__MODULE__, @adapter, struct, opts)
    end

    def update!(struct, opts \\ []) do
      Ecto.Repo.Schema.update!(__MODULE__, @adapter, struct, opts)
    end

    def insert_or_update!(changeset, opts \\ []) do
      Ecto.Repo.Schema.insert_or_update!(__MODULE__, @adapter, changeset, opts)
    end

    def delete!(struct, opts \\ []) do
      Ecto.Repo.Schema.delete!(__MODULE__, @adapter, struct, opts)
    end

    def preload(struct_or_structs_or_nil, preloads, opts \\ []) do
      Ecto.Repo.Preloader.preload(struct_or_structs_or_nil, __MODULE__, preloads, opts)
    end

    def load(schema_or_types, data) do
      Ecto.Repo.Schema.load(@adapter, schema_or_types, data)
    end

    # Allow callers of `use Ecto.Repo` to override the child spec.
    defoverridable child_spec: 1
  end
end
@optional_callbacks init: 2
@doc """
Returns the adapter tied to the repository.
"""
@callback __adapter__ :: Ecto.Adapter.t
@doc """
A callback invoked by adapters that logs the given action.
See `Ecto.LogEntry` for more information and `Ecto.Repo` module
documentation on setting up your own loggers.
"""
@callback __log__(entry :: Ecto.LogEntry.t) :: Ecto.LogEntry.t
@doc """
Returns the adapter configuration stored in the `:otp_app` environment.
If the `c:init/2` callback is implemented in the repository,
it will be invoked with the first argument set to `:dry_run`.
"""
@callback config() :: Keyword.t
@doc """
Starts any connection pooling or supervision and return `{:ok, pid}`
or just `:ok` if nothing needs to be done.
Returns `{:error, {:already_started, pid}}` if the repo is already
started or `{:error, term}` in case anything else goes wrong.
## Options
See the configuration in the moduledoc for options shared between adapters,
for adapter-specific configuration see the adapter's documentation.
"""
@callback start_link(opts :: Keyword.t) :: {:ok, pid} |
{:error, {:already_started, pid}} |
{:error, term}
@doc """
A callback executed when the repo starts or when configuration is read.
The first argument is the context the callback is being invoked. If it
is called because the Repo supervisor is starting, it will be `:supervisor`.
It will be `:dry_run` if it is called for reading configuration without
actually starting a process.
The second argument is the repository configuration as stored in the
application environment. It must return `{:ok, keyword}` with the updated
list of configuration or `:ignore` (only in the `:supervisor` case).
"""
@callback init(:supervisor | :dry_run, config :: Keyword.t) :: {:ok, Keyword.t} | :ignore
@doc """
Shuts down the repository represented by the given pid.
"""
@callback stop(pid, timeout) :: :ok
@doc """
Fetches a single struct from the data store where the primary key matches the
given id.
Returns `nil` if no result was found. If the struct in the queryable
has no or more than one primary key, it will raise an argument error.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get(Post, 42)
"""
@callback get(queryable :: Ecto.Queryable.t, id :: term, opts :: Keyword.t) :: Ecto.Schema.t | nil | no_return
@doc """
Similar to `c:get/3` but raises `Ecto.NoResultsError` if no record was found.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get!(Post, 42)
"""
@callback get!(queryable :: Ecto.Queryable.t, id :: term, opts :: Keyword.t) :: Ecto.Schema.t | nil | no_return
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by(Post, title: "My post")
"""
@callback get_by(queryable :: Ecto.Queryable.t, clauses :: Keyword.t | map, opts :: Keyword.t) :: Ecto.Schema.t | nil | no_return
@doc """
Similar to `get_by/3` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
## Example
MyRepo.get_by!(Post, title: "My post")
"""
@callback get_by!(queryable :: Ecto.Queryable.t, clauses :: Keyword.t | map, opts :: Keyword.t) :: Ecto.Schema.t | nil | no_return
@doc """
Calculate the given `aggregate` over the given `field`.
If the query has a limit, offset or distinct set, it will be
automatically wrapped in a subquery in order to return the
proper result.
Any preload or select in the query will be ignored in favor of
the column being aggregated.
The aggregation will fail if any `group_by` field is set.
## Options
See the "Shared options" section at the module documentation.
## Examples
# Returns the number of visits per blog post
Repo.aggregate(Post, :count, :visits)
# Returns the average number of visits for the top 10
query = from Post, limit: 10
Repo.aggregate(query, :avg, :visits)
"""
@callback aggregate(queryable :: Ecto.Queryable.t, aggregate :: :avg | :count | :max | :min | :sum,
field :: atom, opts :: Keyword.t) :: term | nil
@doc """
Fetches a single result from the query.
Returns `nil` if no result was found. Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one(queryable :: Ecto.Queryable.t, opts :: Keyword.t) :: Ecto.Schema.t | nil | no_return
@doc """
Similar to `c:one/2` but raises `Ecto.NoResultsError` if no record was found.
Raises if more than one entry.
## Options
See the "Shared options" section at the module documentation.
"""
@callback one!(queryable :: Ecto.Queryable.t, opts :: Keyword.t) :: Ecto.Schema.t | no_return
@doc """
Preloads all associations on the given struct or structs.
This is similar to `Ecto.Query.preload/3` except it allows
you to preload structs after they have been fetched from the
database.
In case the association was already loaded, preload won't attempt
to reload it.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, Ecto won't preload associations that
are already loaded. By setting this option to true, any existing
association will be discarded and reloaded.
* `:in_parallel` - If the preloads must be done in parallel. It can
only be performed when we have more than one preload and the
repository is not in a transaction. Defaults to `true`.
* `:prefix` - the prefix to fetch preloads from. By default, queries
will use the same prefix as the one in the given collection. This
option allows the prefix to be changed.
## Examples
# Use a single atom to preload an association
posts = Repo.preload posts, :comments
# Use a list of atoms to preload multiple associations
posts = Repo.preload posts, [:comments, :authors]
# Use a keyword list to preload nested associations as well
posts = Repo.preload posts, [comments: [:replies, :likes], authors: []]
# Use a keyword list to customize how associations are queried
posts = Repo.preload posts, [comments: from(c in Comment, order_by: c.published_at)]
# Use a two-element tuple for a custom query and nested association definition
query = from c in Comment, order_by: c.published_at
posts = Repo.preload posts, [comments: {query, [:replies, :likes]}]
Note: The query given to preload may also preload its own associations.
"""
@callback preload(structs_or_struct_or_nil, preloads :: term, opts :: Keyword.t) ::
structs_or_struct_or_nil when structs_or_struct_or_nil: [Ecto.Schema.t] | Ecto.Schema.t | nil
@doc """
Fetches all entries from the data store matching the given query.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
MyRepo.all(query)
"""
@callback all(queryable :: Ecto.Query.t, opts :: Keyword.t) :: [Ecto.Schema.t] | no_return
@doc """
Returns a lazy enumerable that emits all entries from the data store
matching the given query. SQL adapters, such as Postgres and MySQL, can only
enumerate a stream inside a transaction.
May raise `Ecto.QueryError` if query validation fails.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query
* `:max_rows` - The number of rows to load from the database as we stream.
It is supported at least by Postgres and MySQL and defaults to 500.
See the "Shared options" section at the module documentation.
## Example
# Fetch all post titles
query = from p in Post,
select: p.title
stream = MyRepo.stream(query)
MyRepo.transaction(fn() ->
Enum.to_list(stream)
end)
"""
@callback stream(queryable :: Ecto.Query.t, opts :: Keyword.t) :: Enum.t
@doc """
Inserts all entries into the repository.
It expects a schema (`MyApp.User`) or a source (`"users"`) or
both (`{"users", MyApp.User}`) as the first argument. The second
argument is a list of entries to be inserted, either as keyword
lists or as maps.
It returns a tuple containing the number of entries
and any returned result as second element. If the database
does not support RETURNING in INSERT statements or no
return result was selected, the second element will be `nil`.
When a schema is given, the values given will be properly dumped
before being sent to the database. If the schema contains an
autogenerated ID field, it will be handled either at the adapter
or the storage layer. However any other autogenerated value, like
timestamps, won't be autogenerated when using `c:insert_all/3`.
This is by design as this function aims to be a more direct way
to insert data into the database without the conveniences of
`c:insert/2`. This is also consistent with `c:update_all/3` that
does not handle timestamps as well.
It is also not possible to use `insert_all` to insert across multiple
tables, therefore associations are not supported.
If a source is given, without a schema, the given fields are passed
as is to the adapter.
## Options
* `:returning` - selects which fields to return. When `true`,
returns all fields in the given struct. May be a list of
fields, where a struct is still returned but only with the
given fields. Or `false`, where nothing is returned (the default).
This option is not supported by all databases.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL).
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions, `{:replace, fields}` or an `Ecto.Query`
query for updates. See the "Upserts" section for more information.
* `:conflict_target` - Which columns to verify for conflicts. If
none is specified, the conflict target is left up to the database
and is usually made of primary keys and/or unique/exclusion constraints.
May also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.insert_all(Post, [[title: "My first post"], [title: "My second post"]])
MyRepo.insert_all(Post, [%{title: "My first post"}, %{title: "My second post"}])
## Upserts
`c:insert_all/3` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row with the values
in sent by Ecto
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced. This option requires a schema
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
## Return values
By default, both Postgres and MySQL return the amount of entries
inserted on `c:insert_all/3`. However, when the `:on_conflict` option
is specified, Postgres will only return a row if it was affected
while MySQL returns at least the number of entries attempted.
For example, if `:on_conflict` is set to `:nothing`, Postgres will
return 0 if no new entry was added while MySQL will still return
the amount of entries attempted to be inserted, even if no entry
was added. Even worse, if `:on_conflict` is a query, MySQL will return
the number of attempted entries plus the number of entries modified
by the UPDATE query.
"""
@callback insert_all(schema_or_source :: binary | {binary, Ecto.Schema.t} | Ecto.Schema.t,
entries :: [map | Keyword.t], opts :: Keyword.t) :: {integer, nil | [term]} | no_return
@doc """
Updates all entries matching the given query with the given values.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from UPDATEs.
Keep in mind this `update_all` will not update autogenerated
fields like the `updated_at` columns.
See `Ecto.Query.update/3` for update operations that can be
performed on fields.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.update_all(Post, set: [title: "New title"])
MyRepo.update_all(Post, inc: [visits: 1])
MyRepo.update_all(Post, [inc: [visits: 1]], [returning: [:visits]])
from(p in Post, where: p.id < 10)
|> MyRepo.update_all(set: [title: "New title"])
from(p in Post, where: p.id < 10, update: [set: [title: "New title"]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: ^new_title]])
|> MyRepo.update_all([])
from(p in Post, where: p.id < 10, update: [set: [title: fragment("upper(?)", ^new_title)]])
|> MyRepo.update_all([])
"""
@callback update_all(queryable :: Ecto.Queryable.t, updates :: Keyword.t, opts :: Keyword.t) ::
{integer, nil | [term]} | no_return
@doc """
Deletes all entries matching the given query.
It returns a tuple containing the number of entries and any returned
result as second element. The second element is `nil` by default
unless a `select` is supplied in the update query. Note, however,
not all databases support returning data from DELETEs.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the query.
See the "Shared options" section at the module documentation for
remaining options.
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
@callback delete_all(queryable :: Ecto.Queryable.t, opts :: Keyword.t) ::
{integer, nil | [term]} | no_return
@doc """
Inserts a struct defined via `Ecto.Schema` or a changeset.
In case a struct is given, the struct is converted into a changeset
with all non-nil fields as part of the changeset.
In case a changeset is given, the changes in the changeset are
merged with the struct fields, and all of them are sent to the
database.
It returns `{:ok, struct}` if the struct has been successfully
inserted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:returning` - selects which fields to return. When `true`, returns
all fields in the given struct. May be a list of fields, where a
struct is still returned but only with the given fields. In any case,
it will include fields with `read_after_writes` set to true.
This option is not supported by all databases.
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
* `:on_conflict` - It may be one of `:raise` (the default), `:nothing`,
`:replace_all`, `:replace_all_except_primary_key`, `{:replace, fields}`,
a keyword list of update instructions or an `Ecto.Query` query for updates.
See the "Upserts" section for more information.
* `:conflict_target` - Which columns to verify for conflicts. If
none is specified, the conflict target is left up to the database
and is usually made of primary keys and/or unique/exclusion constraints.
May also be `{:constraint, constraint_name_as_atom}` in databases
that support the "ON CONSTRAINT" expression.
See the "Shared options" section at the module documentation.
## Examples
A typical example is calling `MyRepo.insert/1` with a struct
and acting on the return value:
case MyRepo.insert %Post{title: "Ecto is great"} do
{:ok, struct} -> # Inserted with success
{:error, changeset} -> # Something went wrong
end
## Upserts
`c:insert/2` provides upserts (update or inserts) via the `:on_conflict`
option. The `:on_conflict` option supports the following values:
* `:raise` - raises if there is a conflicting primary key or unique index
* `:nothing` - ignores the error in case of conflicts
* `:replace_all` - replace all values on the existing row with the values
in the schema/changeset, including autogenerated fields such as `inserted_at`
and `updated_at`
* `:replace_all_except_primary_key` - same as above except primary keys are
not replaced
* `{:replace, fields}` - replace only specific columns. This option requires
conflict_target
* a keyword list of update instructions - such as the one given to
`c:update_all/3`, for example: `[set: [title: "new title"]]`
* an `Ecto.Query` that will act as an `UPDATE` statement, such as the
one given to `c:update_all/3`
Upserts map to "ON CONFLICT" on databases like Postgres and "ON DUPLICATE KEY"
on databases such as MySQL.
As an example, imagine `:title` is marked as a unique column in
the database:
{:ok, inserted} = MyRepo.insert(%Post{title: "this is unique"})
Now we can insert with the same title but do nothing on conflicts:
{:ok, ignored} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: :nothing)
assert ignored.id == nil
Because we used `on_conflict: :nothing`, instead of getting an error,
we got `{:ok, struct}`. However the returned struct does not reflect
the data in the database. One possible mechanism to detect if an
insert or nothing happened in case of `on_conflict: :nothing` is by
checking the `id` field. `id` will be nil if the field is autogenerated
by the database and no insert happened.
For actual upserts, where an insert or update may happen, the situation
is slightly more complex, as the database does not actually inform us
if an insert or update happened. Let's insert a post with the same title
but use a query to update the body column in case of conflicts:
# In Postgres (it requires the conflict target for updates):
on_conflict = [set: [body: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"},
on_conflict: on_conflict, conflict_target: :title)
# In MySQL (conflict target is not supported):
on_conflict = [set: [title: "updated"]]
{:ok, updated} = MyRepo.insert(%Post{id: inserted.id, title: "updated"},
on_conflict: on_conflict)
In the examples above, even though it returned `:ok`, we do not know
if we inserted new data or if we updated only the `:on_conflict` fields.
In case an update happened, the data in the struct most likely does
not match the data in the database. For example, autogenerated fields
such as `inserted_at` will point to now rather than the time the
struct was actually inserted.
If you need to guarantee the data in the returned struct mirrors the
database, you have three options:
* Use `on_conflict: :replace_all`, although that will replace all
fields in the database with the ones in the struct/changeset,
    including autogenerated fields such as `inserted_at` and `updated_at`:
MyRepo.insert(%Post{title: "this is unique"},
on_conflict: :replace_all, conflict_target: :title)
* Specify `read_after_writes: true` in your schema for choosing
fields that are read from the database after every operation.
Or pass `returning: true` to `insert` to read all fields back:
MyRepo.insert(%Post{title: "this is unique"}, returning: true,
on_conflict: on_conflict, conflict_target: :title)
* Alternatively, read the data again from the database in a separate
query. This option requires the primary key to be generated by the
database:
{:ok, updated} = MyRepo.insert(%Post{title: "this is unique"}, on_conflict: on_conflict)
Repo.get(Post, updated.id)
"""
@callback insert(struct_or_changeset :: Ecto.Schema.t | Ecto.Changeset.t, opts :: Keyword.t) ::
{:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t}
@doc """
Updates a changeset using its primary key.
A changeset is required as it is the only mechanism for
tracking dirty changes. Only the fields present in the `changes` part
of the changeset are sent to the database. Any other, in-memory
changes done to the schema are ignored.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised.
It returns `{:ok, struct}` if the struct has been successfully
updated or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
Besides the "Shared options" section at the module documentation,
it accepts:
* `:force` - By default, if there are no changes in the changeset,
`c:update/2` is a no-op. By setting this option to true, update
callbacks will always be executed, even if there are no changes
(including timestamps).
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
## Example
post = MyRepo.get!(Post, 42)
post = Ecto.Changeset.change post, title: "New title"
case MyRepo.update post do
{:ok, struct} -> # Updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback update(changeset :: Ecto.Changeset.t, opts :: Keyword.t) ::
{:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t}
@doc """
Inserts or updates a changeset depending on whether the struct is persisted
or not.
The distinction whether to insert or update will be made on the
`Ecto.Schema.Metadata` field `:state`. The `:state` is automatically set by
Ecto when loading or building a schema.
Please note that for this to work, you will have to load existing structs from
the database. So even if the struct exists, this won't work:
struct = %Post{id: "existing_id", ...}
MyRepo.insert_or_update changeset
# => {:error, changeset} # id already exists
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
See the "Shared options" section at the module documentation.
## Example
result =
case MyRepo.get(Post, id) do
nil -> %Post{id: id} # Post not found, we build one
post -> post # Post exists, let's use it
end
|> Post.changeset(changes)
|> MyRepo.insert_or_update
case result do
{:ok, struct} -> # Inserted or updated with success
{:error, changeset} -> # Something went wrong
end
"""
@callback insert_or_update(changeset :: Ecto.Changeset.t, opts :: Keyword.t) ::
{:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t}
@doc """
Deletes a struct using its primary key.
If the struct has no primary key, `Ecto.NoPrimaryKeyFieldError`
will be raised.
It returns `{:ok, struct}` if the struct has been successfully
deleted or `{:error, changeset}` if there was a validation
or a known constraint error.
## Options
* `:prefix` - The prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). This overrides the prefix set
in the struct.
See the "Shared options" section at the module documentation.
## Example
post = MyRepo.get!(Post, 42)
case MyRepo.delete post do
{:ok, struct} -> # Deleted with success
{:error, changeset} -> # Something went wrong
end
"""
@callback delete(struct_or_changeset :: Ecto.Schema.t | Ecto.Changeset.t, opts :: Keyword.t) ::
{:ok, Ecto.Schema.t} | {:error, Ecto.Changeset.t}
@doc """
Same as `c:insert/2` but returns the struct or raises if the changeset is invalid.
"""
@callback insert!(struct_or_changeset :: Ecto.Schema.t | Ecto.Changeset.t, opts :: Keyword.t) ::
Ecto.Schema.t | no_return
@doc """
Same as `c:update/2` but returns the struct or raises if the changeset is invalid.
"""
@callback update!(changeset :: Ecto.Changeset.t, opts :: Keyword.t) ::
Ecto.Schema.t | no_return
@doc """
Same as `c:insert_or_update/2` but returns the struct or raises if the changeset
is invalid.
"""
@callback insert_or_update!(changeset :: Ecto.Changeset.t, opts :: Keyword.t) ::
Ecto.Schema.t | no_return
@doc """
Same as `c:delete/2` but returns the struct or raises if the changeset is invalid.
"""
@callback delete!(struct_or_changeset :: Ecto.Schema.t | Ecto.Changeset.t, opts :: Keyword.t) ::
Ecto.Schema.t | no_return
@doc """
Runs the given function or `Ecto.Multi` inside a transaction.
## Use with function
If an unhandled error occurs the transaction will be rolled back
and the error will bubble up from the transaction function.
If no error occurred the transaction will be committed when the
function returns. A transaction can be explicitly rolled back
by calling `c:rollback/1`, this will immediately leave the function
and return the value given to `rollback` as `{:error, value}`.
A successful transaction returns the value returned by the function
wrapped in a tuple as `{:ok, value}`.
If `c:transaction/2` is called inside another transaction, the function
is simply executed, without wrapping the new transaction call in any
way. If there is an error in the inner transaction and the error is
rescued, or the inner transaction is rolled back, the whole outer
transaction is marked as tainted, guaranteeing nothing will be committed.
## Use with Ecto.Multi
Besides functions transaction can be used with an Ecto.Multi struct.
Transaction will be started, all operations applied and in case of
success committed returning `{:ok, changes}`. In case of any errors
the transaction will be rolled back and
`{:error, failed_operation, failed_value, changes_so_far}` will be
returned.
You can read more about using transactions with `Ecto.Multi` as well as
see some examples in the `Ecto.Multi` documentation.
## Options
See the "Shared options" section at the module documentation.
## Examples
import Ecto.Changeset, only: [change: 2]
MyRepo.transaction(fn ->
MyRepo.update!(change(alice, balance: alice.balance - 10))
MyRepo.update!(change(bob, balance: bob.balance + 10))
end)
# Roll back a transaction explicitly
MyRepo.transaction(fn ->
p = MyRepo.insert!(%Post{})
if not Editor.post_allowed?(p) do
MyRepo.rollback(:posting_not_allowed)
end
end)
# With Ecto.Multi
Ecto.Multi.new
|> Ecto.Multi.insert(:post, %Post{})
|> MyRepo.transaction
"""
@callback transaction(fun_or_multi :: fun | Ecto.Multi.t, opts :: Keyword.t) ::
{:ok, any} | {:error, any} | {:error, atom, any, %{atom => any}}
@optional_callbacks [transaction: 2]
@doc """
Returns true if the current process is inside a transaction.
## Examples
MyRepo.in_transaction?
#=> false
MyRepo.transaction(fn ->
MyRepo.in_transaction? #=> true
end)
"""
@callback in_transaction?() :: boolean
@optional_callbacks [in_transaction?: 0]
@doc """
Rolls back the current transaction.
The transaction will return the value given as `{:error, value}`.
"""
@callback rollback(value :: any) :: no_return
@optional_callbacks [rollback: 1]
@doc """
Loads `data` into a struct or a map.
The first argument can be a schema, or a map (of types) and determines the return value:
a struct or a map, respectively.
The second argument `data` specifies fields and values that are to be loaded.
It can be a map, a keyword list, or a `{fields, values}` tuple.
Fields can be atoms or strings.
Fields that are not present in the schema (or `types` map) are ignored.
If any of the values has invalid type, an error is raised.
## Examples
iex> MyRepo.load(User, %{name: "Alice", age: 25})
%User{name: "Alice", age: 25}
iex> MyRepo.load(User, [name: "Alice", age: 25])
%User{name: "Alice", age: 25}
`data` can also take form of `{fields, values}`:
iex> MyRepo.load(User, {[:name, :age], ["Alice", 25]})
%User{name: "Alice", age: 25, ...}
The first argument can also be a `types` map:
iex> types = %{name: :string, age: :integer}
iex> MyRepo.load(types, %{name: "Alice", age: 25})
%{name: "Alice", age: 25}
This function is especially useful when parsing raw query results:
iex> result = Ecto.Adapters.SQL.query!(MyRepo, "SELECT * FROM users", [])
iex> Enum.map(result.rows, &MyRepo.load(User, {result.columns, &1}))
[%User{...}, ...]
"""
@callback load(struct_or_map :: Ecto.Schema.t | map(), data :: map() | Keyword.t | {list, list}) ::
Ecto.Schema.t | map()
end
| 36.152511 | 132 | 0.673403 |
03d0c2374b233323c99a47ab354dda056c24fd0e | 4,018 | exs | Elixir | test/helpers_test.exs | carakan/ex_admin | c286a0b66c1c0e81c896b71915c7066f66fc94c2 | [
"MIT"
] | null | null | null | test/helpers_test.exs | carakan/ex_admin | c286a0b66c1c0e81c896b71915c7066f66fc94c2 | [
"MIT"
] | null | null | null | test/helpers_test.exs | carakan/ex_admin | c286a0b66c1c0e81c896b71915c7066f66fc94c2 | [
"MIT"
] | null | null | null | defmodule ExAdmin.HelpersTest do
use ExUnit.Case
alias ExAdmin.Helpers
alias TestExAdmin.Noid
alias TestExAdmin.Simple
alias TestExAdmin.Maps
use Xain
test "build_field" do
res =
Helpers.build_field(%Noid{description: "desc"}, %{}, {:description, %{}}, fn contents,
field_name ->
ExAdmin.Table.handle_contents(contents, field_name)
end)
assert res == ~s(<td class='td-description'>desc</td>)
end
test "build_field Actions" do
resource = %Simple{name: "N", description: "D", id: 1}
defn = %TestExAdmin.ExAdmin.Simple{}
conn =
Plug.Conn.assign(%Plug.Conn{}, :theme, ExAdmin.Theme.AdminLte2)
|> Plug.Conn.assign(:defn, defn)
expected =
"<td class='td-actions'><a href='/admin/simples/1' class='member_link view_link' title='View'>View</a>" <>
"<a href='/admin/simples/1/edit' class='member_link edit_link' title='Edit'>Edit</a>" <>
"<a href='/admin/simples/1' class='member_link delete_link'" <>
" data-confirm='Are you sure you want to delete this?'" <>
" data-remote='true' data-method='delete' data-params='page=1' rel='nofollow' title='Delete'>Delete</a></td>"
res =
Helpers.build_field(
resource,
conn,
{"Actions",
%{
fun: fn res ->
ExAdmin.Index.build_index_links(conn, res, [:show, :edit, :delete])
end
}},
fn contents, field_name ->
ExAdmin.Table.handle_contents(contents, field_name)
end
)
assert res == expected
end
test "build_field with complex map data" do
resource = %Maps{stats: %{list: [%{}]}}
res =
Helpers.build_field(resource, %{}, {:stats, %{}}, fn contents, _field_name ->
ExAdmin.Render.to_string(contents)
end)
assert res == ~s(list: [{}])
end
test "group_by" do
list = [one: 1, two: 2, two: 3]
result = Helpers.group_by(list, &elem(&1, 0))
assert result[:one] == [one: 1]
assert result[:two] == [two: 2, two: 3]
end
test "group_reduce_by_reverse" do
list = [one: 1, two: 2, two: 3]
result = Helpers.group_reduce_by_reverse(list)
assert result[:one] == [1]
assert result[:two] == [3, 2]
end
test "group_reduce_by" do
list = [one: 1, two: 2, two: 3]
result = Helpers.group_reduce_by(list)
assert result[:one] == [1]
assert result[:two] == [2, 3]
list = [
after_filter: {:three, []},
before_filter: {:two, [only: [:update]]},
before_filter: {:one, [only: [:create, :update]]}
]
result = Helpers.group_reduce_by(list)
assert result[:before_filter] == [two: [only: [:update]], one: [only: [:create, :update]]]
assert result[:after_filter] == [three: []]
end
test "get_name_field :name" do
assert Helpers.get_name_field(TestExAdmin.User) == :name
end
test "get_name_field :title" do
assert Helpers.get_name_field(TestExAdmin.Product) == :title
end
test "get_name_field not first" do
assert Helpers.get_name_field(TestExAdmin.Noid) == :name
end
test "get_name_field :first string field" do
assert Helpers.get_name_field(TestExAdmin.PhoneNumber) == :number
end
test "display_name name" do
assert Helpers.display_name(%TestExAdmin.User{name: "test"}) == "test"
end
test "display_name first string field" do
assert Helpers.display_name(%TestExAdmin.PhoneNumber{number: "5555"}) == "5555"
end
test "model_name from atom" do
assert Helpers.model_name(TestExAdmin.PhoneNumber) == "phone_number"
end
test "model_name from struct" do
assert Helpers.model_name(%TestExAdmin.PhoneNumber{}) == "phone_number"
end
test "model_name from atom override" do
assert Helpers.model_name(TestExAdmin.ModelDisplayName) == "custom_name"
end
test "model_name from struct override" do
assert Helpers.model_name(%TestExAdmin.ModelDisplayName{}) == "custom_name"
end
end
| 29.985075 | 117 | 0.624938 |
03d0cbfac5cca470bfc6e906dc12620de600be8a | 1,844 | ex | Elixir | clients/games/lib/google_api/games/v1/model/event_record_failure.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/event_record_failure.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/model/event_record_failure.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Games.V1.Model.EventRecordFailure do
  @moduledoc """
  This is a JSON template for an event update failure resource.

  ## Attributes

  - eventId (String): The ID of the event that was not updated. Defaults to: `null`.
  - failureCause (String): The cause for the update failure. Possible values are: - \"NOT_FOUND\" - An attempt was made to set an event that was not defined. - \"INVALID_UPDATE_VALUE\" - An attempt was made to increment an event by a non-positive value. Defaults to: `null`.
  - kind (String): Uniquely identifies the type of this resource. Value is always the fixed string games#eventRecordFailure. Defaults to: `null`.
  """

  # The camelCase spelling mirrors the JSON field names of the API resource;
  # quoting the atoms is unnecessary, so the plain form is used here.
  defstruct [:eventId, :failureCause, :kind]
end
defimpl Poison.Decoder, for: GoogleApi.Games.V1.Model.EventRecordFailure do
  # All fields of this struct are plain values, so decoding needs no nested
  # model handling: the value is returned unchanged.
  def decode(value, _options), do: value
end
defimpl Poison.Encoder, for: GoogleApi.Games.V1.Model.EventRecordFailure do
  # Delegates to the generated serializer helper which, per its name,
  # serializes only the non-nil fields of the struct.
  def encode(value, options),
    do: GoogleApi.Games.V1.Deserializer.serialize_non_nil(value, options)
end
| 36.88 | 297 | 0.745119 |
03d0e47bff22341e2933cb0048e24c964ec87450 | 4,479 | ex | Elixir | lib/codes/codes_e78.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_e78.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_e78.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_E78 do
alias IcdCode.ICDCode
def _E7800 do
%ICDCode{full_code: "E7800",
category_code: "E78",
short_code: "00",
full_name: "Pure hypercholesterolemia, unspecified",
short_name: "Pure hypercholesterolemia, unspecified",
category_name: "Pure hypercholesterolemia, unspecified"
}
end
def _E7801 do
%ICDCode{full_code: "E7801",
category_code: "E78",
short_code: "01",
full_name: "Familial hypercholesterolemia",
short_name: "Familial hypercholesterolemia",
category_name: "Familial hypercholesterolemia"
}
end
def _E781 do
%ICDCode{full_code: "E781",
category_code: "E78",
short_code: "1",
full_name: "Pure hyperglyceridemia",
short_name: "Pure hyperglyceridemia",
category_name: "Pure hyperglyceridemia"
}
end
def _E782 do
%ICDCode{full_code: "E782",
category_code: "E78",
short_code: "2",
full_name: "Mixed hyperlipidemia",
short_name: "Mixed hyperlipidemia",
category_name: "Mixed hyperlipidemia"
}
end
def _E783 do
%ICDCode{full_code: "E783",
category_code: "E78",
short_code: "3",
full_name: "Hyperchylomicronemia",
short_name: "Hyperchylomicronemia",
category_name: "Hyperchylomicronemia"
}
end
def _E784 do
%ICDCode{full_code: "E784",
category_code: "E78",
short_code: "4",
full_name: "Other hyperlipidemia",
short_name: "Other hyperlipidemia",
category_name: "Other hyperlipidemia"
}
end
def _E785 do
%ICDCode{full_code: "E785",
category_code: "E78",
short_code: "5",
full_name: "Hyperlipidemia, unspecified",
short_name: "Hyperlipidemia, unspecified",
category_name: "Hyperlipidemia, unspecified"
}
end
def _E786 do
%ICDCode{full_code: "E786",
category_code: "E78",
short_code: "6",
full_name: "Lipoprotein deficiency",
short_name: "Lipoprotein deficiency",
category_name: "Lipoprotein deficiency"
}
end
def _E7870 do
%ICDCode{full_code: "E7870",
category_code: "E78",
short_code: "70",
full_name: "Disorder of bile acid and cholesterol metabolism, unspecified",
short_name: "Disorder of bile acid and cholesterol metabolism, unspecified",
category_name: "Disorder of bile acid and cholesterol metabolism, unspecified"
}
end
def _E7871 do
%ICDCode{full_code: "E7871",
category_code: "E78",
short_code: "71",
full_name: "Barth syndrome",
short_name: "Barth syndrome",
category_name: "Barth syndrome"
}
end
def _E7872 do
%ICDCode{full_code: "E7872",
category_code: "E78",
short_code: "72",
full_name: "Smith-Lemli-Opitz syndrome",
short_name: "Smith-Lemli-Opitz syndrome",
category_name: "Smith-Lemli-Opitz syndrome"
}
end
def _E7879 do
%ICDCode{full_code: "E7879",
category_code: "E78",
short_code: "79",
full_name: "Other disorders of bile acid and cholesterol metabolism",
short_name: "Other disorders of bile acid and cholesterol metabolism",
category_name: "Other disorders of bile acid and cholesterol metabolism"
}
end
def _E7881 do
%ICDCode{full_code: "E7881",
category_code: "E78",
short_code: "81",
full_name: "Lipoid dermatoarthritis",
short_name: "Lipoid dermatoarthritis",
category_name: "Lipoid dermatoarthritis"
}
end
def _E7889 do
%ICDCode{full_code: "E7889",
category_code: "E78",
short_code: "89",
full_name: "Other lipoprotein metabolism disorders",
short_name: "Other lipoprotein metabolism disorders",
category_name: "Other lipoprotein metabolism disorders"
}
end
def _E789 do
%ICDCode{full_code: "E789",
category_code: "E78",
short_code: "9",
full_name: "Disorder of lipoprotein metabolism, unspecified",
short_name: "Disorder of lipoprotein metabolism, unspecified",
category_name: "Disorder of lipoprotein metabolism, unspecified"
}
end
end
| 31.542254 | 88 | 0.609958 |
03d0eb418cbc8b21da7af6476b9d5f44df3ea6da | 847 | exs | Elixir | test/plural_rules/identity_plural_test_test.exs | KineticCafe/cldr | 7b84cd85564bca4a2c4e01c02ee0aa284bf07367 | [
"Apache-2.0"
] | 179 | 2019-06-16T09:16:00.000Z | 2022-03-30T04:04:55.000Z | test/plural_rules/identity_plural_test_test.exs | KineticCafe/cldr | 7b84cd85564bca4a2c4e01c02ee0aa284bf07367 | [
"Apache-2.0"
] | 46 | 2019-06-09T02:35:58.000Z | 2022-03-08T10:39:08.000Z | test/plural_rules/identity_plural_test_test.exs | KineticCafe/cldr | 7b84cd85564bca4a2c4e01c02ee0aa284bf07367 | [
"Apache-2.0"
] | 14 | 2020-03-03T16:35:50.000Z | 2022-02-27T14:01:40.000Z | defmodule Cldr.IdentityPluralRule.Test do
use ExUnit.Case
test "integer identity plural selection" do
substitutions = %{42 => "This is 42", :other => "This is not"}
assert TestBackend.Cldr.Number.Cardinal.pluralize(42, "en", substitutions) == "This is 42"
assert TestBackend.Cldr.Number.Ordinal.pluralize(42, "en", substitutions) == "This is 42"
end
test "float identity pluralization" do
substitutions = %{42 => "This is 42", :other => "This is not"}
assert TestBackend.Cldr.Number.Cardinal.pluralize(42.0, "en", substitutions) ==
"This is 42"
assert TestBackend.Cldr.Number.Cardinal.pluralize(Decimal.new("42.0"), "en", substitutions) ==
"This is 42"
assert TestBackend.Cldr.Number.Cardinal.pluralize(Decimal.new(42), "en", substitutions) ==
"This is 42"
end
end
| 36.826087 | 98 | 0.669421 |
03d12e2ba25a0f09856b7b264cf18d088bc78cca | 25,342 | exs | Elixir | lib/elixir/test/elixir/dynamic_supervisor_test.exs | princemaple/elixir | d894dcca3380b2a37a72e940103ae8eeb42e540e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/dynamic_supervisor_test.exs | princemaple/elixir | d894dcca3380b2a37a72e940103ae8eeb42e540e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/dynamic_supervisor_test.exs | princemaple/elixir | d894dcca3380b2a37a72e940103ae8eeb42e540e | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule DynamicSupervisorTest do
use ExUnit.Case, async: true
  # Minimal DynamicSupervisor used throughout these tests: `init/1` returns
  # whatever the test passed as the start argument, so each test can force
  # any `DynamicSupervisor.init/1` result ({:ok, map}, :ignore, etc.).
  defmodule Simple do
    use DynamicSupervisor
    def init(args), do: args
  end
  test "can be supervised directly" do
    # A `{DynamicSupervisor, opts}` child spec can be placed under a regular
    # Supervisor; after boot it is reachable by name and has no children yet.
    children = [{DynamicSupervisor, strategy: :one_for_one, name: :dyn_sup_spec_test}]
    assert {:ok, _} = Supervisor.start_link(children, strategy: :one_for_one)
    assert DynamicSupervisor.which_children(:dyn_sup_spec_test) == []
  end
  describe "use/2" do
    test "generates child_spec/1" do
      # Defaults: the using module becomes the :id and :start points at its
      # start_link/1 with the given argument wrapped in a list.
      assert Simple.child_spec([:hello]) == %{
               id: Simple,
               start: {Simple, :start_link, [[:hello]]},
               type: :supervisor
             }
      # Options given to `use DynamicSupervisor` override the generated spec
      # (:id, :restart, :shutdown and even the :start MFA).
      defmodule Custom do
        use DynamicSupervisor,
          id: :id,
          restart: :temporary,
          shutdown: :infinity,
          start: {:foo, :bar, []}
        def init(arg), do: {:producer, arg}
      end
      assert Custom.child_spec([:hello]) == %{
               id: :id,
               restart: :temporary,
               shutdown: :infinity,
               start: {:foo, :bar, []},
               type: :supervisor
             }
    end
  end
  describe "init/1" do
    test "set default options" do
      # Omitted options are filled in with defaults: intensity of 3 restarts
      # per 5-second period, unlimited children, no extra start arguments.
      assert DynamicSupervisor.init(strategy: :one_for_one) ==
               {:ok,
                %{
                  strategy: :one_for_one,
                  intensity: 3,
                  period: 5,
                  max_children: :infinity,
                  extra_arguments: []
                }}
    end
  end
  describe "start_link/3" do
    test "with non-ok init" do
      Process.flag(:trap_exit, true)
      # Each invalid supervisor option is rejected with a descriptive
      # {:supervisor_data, reason} error instead of starting the process.
      assert DynamicSupervisor.start_link(Simple, {:ok, %{strategy: :unknown}}) ==
               {:error, {:supervisor_data, {:invalid_strategy, :unknown}}}
      assert DynamicSupervisor.start_link(Simple, {:ok, %{intensity: -1}}) ==
               {:error, {:supervisor_data, {:invalid_intensity, -1}}}
      assert DynamicSupervisor.start_link(Simple, {:ok, %{period: 0}}) ==
               {:error, {:supervisor_data, {:invalid_period, 0}}}
      assert DynamicSupervisor.start_link(Simple, {:ok, %{max_children: -1}}) ==
               {:error, {:supervisor_data, {:invalid_max_children, -1}}}
      assert DynamicSupervisor.start_link(Simple, {:ok, %{extra_arguments: -1}}) ==
               {:error, {:supervisor_data, {:invalid_extra_arguments, -1}}}
      # Unrecognized init results are reported as a bad return...
      assert DynamicSupervisor.start_link(Simple, :unknown) ==
               {:error, {:bad_return, {Simple, :init, :unknown}}}
      # ...except :ignore, which is passed through as-is.
      assert DynamicSupervisor.start_link(Simple, :ignore) == :ignore
    end
    test "with registered process" do
      {:ok, pid} = DynamicSupervisor.start_link(Simple, {:ok, %{}}, name: __MODULE__)
      # Sets up a link
      {:links, links} = Process.info(self(), :links)
      assert pid in links
      # A name
      assert Process.whereis(__MODULE__) == pid
      # And the initial call
      assert {:supervisor, DynamicSupervisorTest.Simple, 1} =
               :proc_lib.translate_initial_call(pid)
    end
    test "sets initial call to the same as a regular supervisor" do
      # Both supervisor flavors report Supervisor.Default as their
      # :proc_lib initial call.
      {:ok, pid} = Supervisor.start_link([], strategy: :one_for_one)
      assert :proc_lib.initial_call(pid) == {:supervisor, Supervisor.Default, [:Argument__1]}
      {:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
      assert :proc_lib.initial_call(pid) == {:supervisor, Supervisor.Default, [:Argument__1]}
    end
    test "returns the callback module" do
      # Same parity for :supervisor.get_callback_module/1.
      {:ok, pid} = Supervisor.start_link([], strategy: :one_for_one)
      assert :supervisor.get_callback_module(pid) == Supervisor.Default
      {:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
      assert :supervisor.get_callback_module(pid) == Supervisor.Default
    end
  end
  ## Code change
  describe "code_change/3" do
    test "with non-ok init" do
      {:ok, pid} = DynamicSupervisor.start_link(Simple, {:ok, %{}})
      # A code change whose init result is invalid is rejected and the
      # supervisor keeps running with its previous configuration.
      assert fake_upgrade(pid, {:ok, %{strategy: :unknown}}) ==
               {:error, {:error, {:supervisor_data, {:invalid_strategy, :unknown}}}}
      assert fake_upgrade(pid, {:ok, %{intensity: -1}}) ==
               {:error, {:error, {:supervisor_data, {:invalid_intensity, -1}}}}
      assert fake_upgrade(pid, {:ok, %{period: 0}}) ==
               {:error, {:error, {:supervisor_data, {:invalid_period, 0}}}}
      assert fake_upgrade(pid, {:ok, %{max_children: -1}}) ==
               {:error, {:error, {:supervisor_data, {:invalid_max_children, -1}}}}
      assert fake_upgrade(pid, :unknown) == {:error, :unknown}
      assert fake_upgrade(pid, :ignore) == :ok
    end
    test "with ok init" do
      {:ok, pid} = DynamicSupervisor.start_link(Simple, {:ok, %{}})
      # sleepy_worker/0 is a child-spec helper defined elsewhere in this file.
      {:ok, _} = DynamicSupervisor.start_child(pid, sleepy_worker())
      assert %{active: 1} = DynamicSupervisor.count_children(pid)
      # After upgrading to max_children: 1 the existing child keeps running
      # but no further children may be started.
      assert fake_upgrade(pid, {:ok, %{max_children: 1}}) == :ok
      assert %{active: 1} = DynamicSupervisor.count_children(pid)
      assert DynamicSupervisor.start_child(pid, {Task, fn -> :ok end}) == {:error, :max_children}
    end
    # Simulates a hot code upgrade: suspend the process, swap the stored init
    # args via :sys.replace_state/2, run the code change, then resume.
    defp fake_upgrade(pid, args) do
      :ok = :sys.suspend(pid)
      :sys.replace_state(pid, fn state -> %{state | args: args} end)
      res = :sys.change_code(pid, :gen_server, 123, :extra)
      :ok = :sys.resume(pid)
      res
    end
  end
describe "start_child/2" do
test "supports old child spec" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
child = {Task, {Task, :start_link, [fn -> :ok end]}, :temporary, 5000, :worker, [Task]}
assert {:ok, pid} = DynamicSupervisor.start_child(pid, child)
assert is_pid(pid)
end
test "supports new child spec as tuple" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
child = %{id: Task, restart: :temporary, start: {Task, :start_link, [fn -> :ok end]}}
assert {:ok, pid} = DynamicSupervisor.start_child(pid, child)
assert is_pid(pid)
end
test "supports new child spec" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
child = {Task, fn -> :timer.sleep(:infinity) end}
assert {:ok, pid} = DynamicSupervisor.start_child(pid, child)
assert is_pid(pid)
end
test "supports extra arguments" do
parent = self()
fun = fn -> send(parent, :from_child) end
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, extra_arguments: [fun])
child = %{id: Task, restart: :temporary, start: {Task, :start_link, []}}
assert {:ok, pid} = DynamicSupervisor.start_child(pid, child)
assert is_pid(pid)
assert_receive :from_child
end
test "with invalid child spec" do
assert DynamicSupervisor.start_child(:not_used, %{}) == {:error, {:invalid_child_spec, %{}}}
assert DynamicSupervisor.start_child(:not_used, {1, 2, 3, 4, 5, 6}) ==
{:error, {:invalid_mfa, 2}}
assert DynamicSupervisor.start_child(:not_used, %{id: 1, start: {Task, :foo, :bar}}) ==
{:error, {:invalid_mfa, {Task, :foo, :bar}}}
end
test "with different returns" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
assert {:ok, _, :extra} = DynamicSupervisor.start_child(pid, current_module_worker([:ok3]))
assert {:ok, _} = DynamicSupervisor.start_child(pid, current_module_worker([:ok2]))
assert :ignore = DynamicSupervisor.start_child(pid, current_module_worker([:ignore]))
assert {:error, :found} =
DynamicSupervisor.start_child(pid, current_module_worker([:error]))
assert {:error, :unknown} =
DynamicSupervisor.start_child(pid, current_module_worker([:unknown]))
end
test "with throw/error/exit" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
assert {:error, {{:nocatch, :oops}, [_ | _]}} =
DynamicSupervisor.start_child(pid, current_module_worker([:non_local, :throw]))
assert {:error, {%RuntimeError{}, [_ | _]}} =
DynamicSupervisor.start_child(pid, current_module_worker([:non_local, :error]))
assert {:error, :oops} =
DynamicSupervisor.start_child(pid, current_module_worker([:non_local, :exit]))
end
test "with max_children" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_children: 0)
assert {:error, :max_children} =
DynamicSupervisor.start_child(pid, current_module_worker([:ok2]))
end
test "temporary child is not restarted regardless of reason" do
child = current_module_worker([:ok2], restart: :temporary)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(pid)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :whatever)
assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(pid)
end
test "transient child is restarted unless normal/shutdown/{shutdown, _}" do
child = current_module_worker([:ok2], restart: :transient)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(pid)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, {:shutdown, :signal})
assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(pid)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :whatever)
assert %{workers: 1, active: 1} = DynamicSupervisor.count_children(pid)
end
test "permanent child is restarted regardless of reason" do
child = current_module_worker([:ok2], restart: :permanent)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 100_000)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert %{workers: 1, active: 1} = DynamicSupervisor.count_children(pid)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, {:shutdown, :signal})
assert %{workers: 2, active: 2} = DynamicSupervisor.count_children(pid)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :whatever)
assert %{workers: 3, active: 3} = DynamicSupervisor.count_children(pid)
end
test "child is restarted with different values" do
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 100_000)
assert {:ok, child1} =
DynamicSupervisor.start_child(pid, current_module_worker([:restart, :ok2]))
assert [{:undefined, ^child1, :worker, [DynamicSupervisorTest]}] =
DynamicSupervisor.which_children(pid)
assert_kill(child1, :shutdown)
assert %{workers: 1, active: 1} = DynamicSupervisor.count_children(pid)
assert {:ok, child2} =
DynamicSupervisor.start_child(pid, current_module_worker([:restart, :ok3]))
assert [
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, ^child2, :worker, [DynamicSupervisorTest]}
] = DynamicSupervisor.which_children(pid)
assert_kill(child2, :shutdown)
assert %{workers: 2, active: 2} = DynamicSupervisor.count_children(pid)
assert {:ok, child3} =
DynamicSupervisor.start_child(pid, current_module_worker([:restart, :ignore]))
assert [
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]}
] = DynamicSupervisor.which_children(pid)
assert_kill(child3, :shutdown)
assert %{workers: 2, active: 2} = DynamicSupervisor.count_children(pid)
assert {:ok, child4} =
DynamicSupervisor.start_child(pid, current_module_worker([:restart, :error]))
assert [
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]}
] = DynamicSupervisor.which_children(pid)
assert_kill(child4, :shutdown)
assert %{workers: 3, active: 2} = DynamicSupervisor.count_children(pid)
assert {:ok, child5} =
DynamicSupervisor.start_child(pid, current_module_worker([:restart, :unknown]))
assert [
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]},
{:undefined, :restarting, :worker, [DynamicSupervisorTest]},
{:undefined, _, :worker, [DynamicSupervisorTest]}
] = DynamicSupervisor.which_children(pid)
assert_kill(child5, :shutdown)
assert %{workers: 4, active: 2} = DynamicSupervisor.count_children(pid)
end
test "restarting children counted in max_children" do
child = current_module_worker([:restart, :error], restart: :permanent)
opts = [strategy: :one_for_one, max_children: 1, max_restarts: 100_000]
{:ok, pid} = DynamicSupervisor.start_link(opts)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert %{workers: 1, active: 0} = DynamicSupervisor.count_children(pid)
child = current_module_worker([:restart, :ok2], restart: :permanent)
assert {:error, :max_children} = DynamicSupervisor.start_child(pid, child)
end
test "child is restarted when trying again" do
child = current_module_worker([:try_again, self()], restart: :permanent)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 2)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_received {:try_again, true}
assert_kill(child_pid, :shutdown)
assert_receive {:try_again, false}
assert_receive {:try_again, true}
assert %{workers: 1, active: 1} = DynamicSupervisor.count_children(pid)
end
test "child triggers maximum restarts" do
Process.flag(:trap_exit, true)
child = current_module_worker([:restart, :error], restart: :permanent)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 1)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert_receive {:EXIT, ^pid, :shutdown}
end
test "child triggers maximum intensity when trying again" do
Process.flag(:trap_exit, true)
child = current_module_worker([:restart, :error], restart: :permanent)
{:ok, pid} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 10)
assert {:ok, child_pid} = DynamicSupervisor.start_child(pid, child)
assert_kill(child_pid, :shutdown)
assert_receive {:EXIT, ^pid, :shutdown}
end
# Fake child start callbacks used by the tests above; each clause mimics one
# possible outcome of a child_spec's start function.

# Successful starts return a process that sleeps forever (:ok3 adds extra info).
def start_link(:ok3) do
  {:ok, spawn_link(fn -> :timer.sleep(:infinity) end), :extra}
end

def start_link(:ok2) do
  {:ok, spawn_link(fn -> :timer.sleep(:infinity) end)}
end

# Plain non-successful return values.
def start_link(:error), do: {:error, :found}
def start_link(:ignore), do: :ignore
def start_link(:unknown), do: :unknown

# Non-local exits raised from inside the start function.
def start_link(:non_local, :throw), do: throw(:oops)
def start_link(:non_local, :error), do: raise("oops")
def start_link(:non_local, :exit), do: exit(:oops)

# Alternates failure and success across restarts, reporting each attempt to
# `reply_to`. State lives in the calling process's dictionary.
def start_link(:try_again, reply_to) do
  if Process.get(:try_again) do
    Process.put(:try_again, false)
    send(reply_to, {:try_again, false})
    {:error, :try_again}
  else
    Process.put(:try_again, true)
    send(reply_to, {:try_again, true})
    start_link(:ok2)
  end
end

# Starts successfully the first time; behaves as `outcome` on restart.
def start_link(:restart, outcome) do
  if Process.get({:restart, outcome}) do
    start_link(outcome)
  else
    Process.put({:restart, outcome}, true)
    start_link(:ok2)
  end
end
end
describe "terminate/2" do
  # :brutal_kill children are killed outright and exit with reason :killed.
  test "terminates children with brutal kill" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    child = sleepy_worker(shutdown: :brutal_kill)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :killed}
    assert_receive {:DOWN, _, :process, ^child2, :killed}
    assert_receive {:DOWN, _, :process, ^child3, :killed}
  end

  # shutdown: :infinity waits for each child to exit after receiving :shutdown.
  test "terminates children with infinity shutdown" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    child = sleepy_worker(shutdown: :infinity)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :shutdown}
    assert_receive {:DOWN, _, :process, ^child2, :shutdown}
    assert_receive {:DOWN, _, :process, ^child3, :shutdown}
  end

  # A child trapping exits may exit with its own abnormal reason, which is
  # what monitors observe.
  test "terminates children with infinity shutdown and abnormal reason" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    fun = fn ->
      Process.flag(:trap_exit, true)
      receive(do: (_ -> exit({:shutdown, :oops})))
    end

    child = Supervisor.child_spec({Task, fun}, shutdown: :infinity)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, {:shutdown, :oops}}
    assert_receive {:DOWN, _, :process, ^child2, {:shutdown, :oops}}
    assert_receive {:DOWN, _, :process, ^child3, {:shutdown, :oops}}
  end

  # An integer shutdown grants the children that many milliseconds to exit
  # gracefully; sleeping Tasks (not trapping exits) exit with :shutdown.
  test "terminates children with integer shutdown" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    child = sleepy_worker(shutdown: 1000)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :shutdown}
    assert_receive {:DOWN, _, :process, ^child2, :shutdown}
    assert_receive {:DOWN, _, :process, ^child3, :shutdown}
  end

  # Same as above but the trapping child chooses an abnormal exit reason.
  test "terminates children with integer shutdown and abnormal reason" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    fun = fn ->
      Process.flag(:trap_exit, true)
      receive(do: (_ -> exit({:shutdown, :oops})))
    end

    child = Supervisor.child_spec({Task, fun}, shutdown: 1000)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, {:shutdown, :oops}}
    assert_receive {:DOWN, _, :process, ^child2, {:shutdown, :oops}}
    assert_receive {:DOWN, _, :process, ^child3, {:shutdown, :oops}}
  end

  # When the shutdown timeout expires, a child that traps exits and keeps
  # sleeping is brutally killed (:killed); non-trapping children exit with
  # :shutdown straight away.
  test "terminates children with expired integer shutdown" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    fun = fn ->
      :timer.sleep(:infinity)
    end

    tmt = fn ->
      Process.flag(:trap_exit, true)
      :timer.sleep(:infinity)
    end

    child_fun = Supervisor.child_spec({Task, fun}, shutdown: 1)
    child_tmt = Supervisor.child_spec({Task, tmt}, shutdown: 1)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child_fun)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child_tmt)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child_fun)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :shutdown}
    assert_receive {:DOWN, _, :process, ^child2, :killed}
    assert_receive {:DOWN, _, :process, ^child3, :shutdown}
  end

  # During supervisor shutdown a permanent child exiting :normal is not
  # restarted; monitors see the :normal exits.
  test "terminates children with permanent restart and normal reason" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    fun = fn ->
      Process.flag(:trap_exit, true)
      receive(do: (_ -> exit(:normal)))
    end

    child = Supervisor.child_spec({Task, fun}, shutdown: :infinity, restart: :permanent)
    assert {:ok, child1} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child2} = DynamicSupervisor.start_child(sup, child)
    assert {:ok, child3} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child1)
    Process.monitor(child2)
    Process.monitor(child3)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :normal}
    assert_receive {:DOWN, _, :process, ^child2, :normal}
    assert_receive {:DOWN, _, :process, ^child3, :normal}
  end

  # Each child's own shutdown strategy determines the exit reason it gets.
  test "terminates with mixed children" do
    Process.flag(:trap_exit, true)
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    assert {:ok, child1} =
             DynamicSupervisor.start_child(sup, sleepy_worker(shutdown: :infinity))

    assert {:ok, child2} =
             DynamicSupervisor.start_child(sup, sleepy_worker(shutdown: :brutal_kill))

    Process.monitor(child1)
    Process.monitor(child2)
    assert_kill(sup, :shutdown)
    assert_receive {:DOWN, _, :process, ^child1, :shutdown}
    assert_receive {:DOWN, _, :process, ^child2, :killed}
  end
end
describe "terminate_child/2" do
  # A :brutal_kill child is killed immediately and removed from the counts.
  test "terminates child with brutal kill" do
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    child = sleepy_worker(shutdown: :brutal_kill)
    assert {:ok, child_pid} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child_pid)
    assert :ok = DynamicSupervisor.terminate_child(sup, child_pid)
    assert_receive {:DOWN, _, :process, ^child_pid, :killed}

    # Terminating the same pid again reports the child as unknown.
    assert {:error, :not_found} = DynamicSupervisor.terminate_child(sup, child_pid)
    assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(sup)
  end

  # With an integer shutdown the child exits gracefully with :shutdown.
  test "terminates child with integer shutdown" do
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one)

    child = sleepy_worker(shutdown: 1000)
    assert {:ok, child_pid} = DynamicSupervisor.start_child(sup, child)

    Process.monitor(child_pid)
    assert :ok = DynamicSupervisor.terminate_child(sup, child_pid)
    assert_receive {:DOWN, _, :process, ^child_pid, :shutdown}

    assert {:error, :not_found} = DynamicSupervisor.terminate_child(sup, child_pid)
    assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(sup)
  end

  # A child stuck in a restart loop can still be terminated via its old pid.
  test "terminates restarting child" do
    {:ok, sup} = DynamicSupervisor.start_link(strategy: :one_for_one, max_restarts: 100_000)

    # Worker starts OK once, then errors on every restart attempt.
    child = current_module_worker([:restart, :error], restart: :permanent)
    assert {:ok, child_pid} = DynamicSupervisor.start_child(sup, child)

    assert_kill(child_pid, :shutdown)
    assert :ok = DynamicSupervisor.terminate_child(sup, child_pid)

    assert {:error, :not_found} = DynamicSupervisor.terminate_child(sup, child_pid)
    assert %{workers: 0, active: 0} = DynamicSupervisor.count_children(sup)
  end
end
# Child spec for a Task that sleeps forever; `opts` override spec fields
# such as :shutdown and :restart.
defp sleepy_worker(opts \\ []) do
  mfa = {Task, :start_link, [:timer, :sleep, [:infinity]]}
  Supervisor.child_spec(%{id: Task, start: mfa}, opts)
end
# Child spec that invokes this module's start_link with `args`.
defp current_module_worker(args, opts \\ []) do
  Supervisor.child_spec(%{id: __MODULE__, start: {__MODULE__, :start_link, args}}, opts)
end
# Exits `pid` with `reason` and blocks until a :DOWN message confirms it.
defp assert_kill(pid, reason) do
  ref = Process.monitor(pid)
  Process.exit(pid, reason)
  assert_receive {:DOWN, ^ref, _, _, _}
end
end
| 39.16847 | 98 | 0.648725 |
03d1887639d3fc764066325d3adf3b387d69d388 | 338 | ex | Elixir | lib/slack_autolinker/plug.ex | wojtekmach/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
] | 3 | 2017-07-28T14:19:20.000Z | 2021-02-09T15:01:25.000Z | lib/slack_autolinker/plug.ex | socialpaymentsbv/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
] | 7 | 2017-02-27T08:54:41.000Z | 2020-03-18T10:18:49.000Z | lib/slack_autolinker/plug.ex | socialpaymentsbv/slack_autolinker | a3623e406439f387a19456055644263993c974ff | [
"MIT"
] | 2 | 2017-03-16T12:13:55.000Z | 2021-01-14T11:51:11.000Z | defmodule SlackAutolinker.Plug do
@moduledoc false
import Plug.Conn
# Plug init: options pass through unchanged.
def init(options), do: options

# Responds 200 with the application version encoded as JSON.
def call(conn, _opts) do
  # :vsn comes from the application spec (set from mix.exs) as a charlist.
  {:ok, vsn} = :application.get_key(:slack_autolinker, :vsn)

  conn
  |> put_resp_content_type("application/json")
  |> send_resp(200, Poison.encode!(%{version: List.to_string(vsn)}))
end
end
| 21.125 | 70 | 0.692308 |
03d188f9f0db5a6a8facb6245b61e39ac3d8869e | 1,589 | ex | Elixir | apps/fz_http/lib/fz_http_web/controllers/auth_controller.ex | mdp/firezone | 53d8f0803a7ef005fdca3ae8c6fa9c3483ae5cbc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/controllers/auth_controller.ex | mdp/firezone | 53d8f0803a7ef005fdca3ae8c6fa9c3483ae5cbc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/controllers/auth_controller.ex | mdp/firezone | 53d8f0803a7ef005fdca3ae8c6fa9c3483ae5cbc | [
"Apache-2.0"
] | null | null | null | defmodule FzHttpWeb.AuthController do
@moduledoc """
Implements the CRUD for a Session
"""
use FzHttpWeb, :controller
alias FzHttpWeb.Authentication
alias FzHttpWeb.Router.Helpers, as: Routes
alias FzHttpWeb.UserFromAuth
# Uncomment when Helpers.callback_url/1 is fixed
# alias Ueberauth.Strategy.Helpers
plug Ueberauth
# Renders the identity-provider sign-in form.
def request(conn, _params) do
  # XXX: Helpers.callback_url/1 generates the wrong URL behind nginx.
  # This is a bug in Ueberauth. auth_url is used instead.
  url = Routes.auth_url(conn, :callback, :identity)

  conn
  |> render("request.html", callback_url: url)
end
# Ueberauth placed a failure in assigns: flash the joined error messages
# and send the user back to the root page.
def callback(%{assigns: %{ueberauth_failure: %{errors: errors}}} = conn, _params) do
  message = Enum.map_join(errors, ". ", & &1.message)

  conn
  |> put_flash(:error, message)
  |> redirect(to: Routes.root_path(conn, :index))
end
# Successful authentication: find or create the user, renew the session,
# sign in, and land the user on their role-specific root page.
def callback(%{assigns: %{ueberauth_auth: auth}} = conn, _params) do
  case UserFromAuth.find_or_create(auth) do
    {:ok, user} ->
      conn
      # Renew the session (fresh session id) on sign-in.
      |> configure_session(renew: true)
      # NOTE(review): presumably used to disconnect the user's live socket
      # on sign-out — confirm against the sign-out path.
      |> put_session(:live_socket_id, "users_socket:#{user.id}")
      |> Authentication.sign_in(user, auth)
      |> redirect(to: root_path_for_role(conn, user.role))

    {:error, reason} ->
      conn
      |> put_flash(:error, "Error signing in: #{reason}")
      # Re-render the sign-in form with the flashed error.
      |> request(%{})
  end
end
# Signs the current user out and returns to the index with a notice.
def delete(conn, _params) do
  conn
  |> Authentication.sign_out()
  |> put_flash(:info, "You are now signed out.")
  |> redirect(to: Routes.root_path(conn, :index))
end
end
| 27.396552 | 86 | 0.650724 |
03d1934b43da58eed7b3802b36c82ab8ea34e7a9 | 2,985 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www/accounts/email.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www/accounts/email.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www/accounts/email.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWW.Accounts.Email do
use Bamboo.Phoenix, view: NervesHubWWWWeb.EmailView
alias NervesHubWebCore.Accounts.{Invite, Org, User, OrgUser}
@from {"NervesHub", "no-reply@nerves-hub.org"}
# Invitation email sent to a prospective member of `org`.
def invite(%Invite{} = invite, %Org{} = org) do
  new_email()
  |> from(@from)
  |> to(invite.email)
  |> subject("[NervesHub] Hi from NervesHub!")
  |> put_layout({NervesHubWWWWeb.LayoutView, :email})
  |> render("invite.html", invite: invite, org: org)
end
# Password-reset email; the guard only accepts users that already have a
# reset token generated.
def forgot_password(%User{email: email, password_reset_token: token} = user)
    when is_binary(token) do
  new_email()
  |> from(@from)
  |> to(email)
  |> subject("[NervesHub] Reset NervesHub Password")
  |> put_layout({NervesHubWWWWeb.LayoutView, :email})
  |> render("forgot_password.html", user: user)
end
# Welcome email sent to a newly added organization member.
def org_user_created(email, %Org{} = org) do
  new_email()
  |> from(@from)
  |> to(email)
  |> subject("[NervesHub] Welcome to #{org.name}")
  |> put_layout({NervesHubWWWWeb.LayoutView, :email})
  |> render("org_user_created.html", org: org)
end
@doc """
Create an email that announces the addition of a new user.

Sent (via BCC, so addresses stay hidden) to every one of the organization's
users except the newly added user. `instigator` is the user who initiated
the addition.
"""
def tell_org_user_added(%Org{} = org, org_users, instigator, %User{} = new_user) do
  recipients =
    org_users
    |> generate_org_users_emails()
    |> Enum.reject(&(&1 == new_user.email))

  new_email()
  |> from(@from)
  |> subject("[NervesHub] User #{instigator} added #{new_user.username} to #{org.name}")
  |> to(@from)
  |> bcc(recipients)
  |> put_layout({NervesHubWWWWeb.LayoutView, :email})
  |> render("tell_org_user_added.html", instigator: instigator, user: new_user, org: org)
end
@doc """
Create an email that announces the removal of a user. The email is
sent to all of the organization's users. The email addresses are specified via BCC.
The instigator is the user who initiated the removal of the user.
"""
def tell_org_user_removed(%Org{} = org, org_users, instigator, %User{} = user_removed) do
  org_users_emails = generate_org_users_emails(org_users)

  new_email()
  |> from(@from)
  |> subject("[NervesHub] User #{instigator} removed #{user_removed.username} from #{org.name}")
  # BCC keeps member addresses hidden; the visible recipient is our own
  # no-reply address.
  |> to(@from)
  |> bcc(org_users_emails)
  |> put_layout({NervesHubWWWWeb.LayoutView, :email})
  |> render("tell_org_user_removed.html", instigator: instigator, user: user_removed, org: org)
end
# Collects the email address of every org member, skipping members without one.
#
# Fix: the previous implementation reduced with a single clause guarded by
# `email != nil`; an org user with a nil email therefore raised
# FunctionClauseError instead of being skipped, contrary to the original
# comment's stated intent (nil emails occur in tests and crash the Bamboo
# mailer). A filtering comprehension skips them safely. Note the result is
# now in input order rather than reversed — immaterial for a BCC list.
defp generate_org_users_emails(org_users) do
  for %OrgUser{user: %User{email: email}} <- org_users, not is_nil(email), do: email
end
end
| 35.535714 | 98 | 0.675042 |
03d1b0e467e0be98f42fad0915a0b2c76b0e2f0e | 588 | exs | Elixir | priv/repo/seeds.exs | aleccool213/elixir-graphql-example | 8f89f4898cda7b7544321ff8dda2f76ea22d4c58 | [
"MIT"
] | 1 | 2018-01-16T10:28:38.000Z | 2018-01-16T10:28:38.000Z | priv/repo/seeds.exs | aleccool213/elixir-graphql-example | 8f89f4898cda7b7544321ff8dda2f76ea22d4c58 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | aleccool213/elixir-graphql-example | 8f89f4898cda7b7544321ff8dda2f76ea22d4c58 | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Community.Repo.insert!(%Community.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias Community.News.Link
alias Community.Repo

# Seed a couple of example links so the development database is not empty.
%Link{url: "http://graphql.org/", description: "The Best Query Language"} |> Repo.insert!
%Link{url: "http://dev.apollodata.com/", description: "Awesome GraphQL Client"} |> Repo.insert!
03d1dbac2c22c16a4884262cd21fe9fc67dd8402 | 120 | exs | Elixir | test/ex_unit_tap_test.exs | mblayman/ex_unit_tap | 860f8b2b6d1aba10844bc9e78f12b572ff6ec961 | [
"MIT"
] | null | null | null | test/ex_unit_tap_test.exs | mblayman/ex_unit_tap | 860f8b2b6d1aba10844bc9e78f12b572ff6ec961 | [
"MIT"
] | 1 | 2016-07-24T01:31:37.000Z | 2016-07-24T01:31:37.000Z | test/ex_unit_tap_test.exs | mblayman/ex_unit_tap | 860f8b2b6d1aba10844bc9e78f12b572ff6ec961 | [
"MIT"
] | null | null | null | defmodule ExUnitTapTest do
use ExUnit.Case
doctest ExUnitTap
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.333333 | 26 | 0.683333 |
03d1ff7285c250a61a7b7e4e6731ebc2f4c1611f | 454 | ex | Elixir | lib/fipe_crawler/supervisor.ex | teofilosalgado/FipeCrawler | 0e25bc10bb23da2b8cb0204fd1db07a07cc7b4c5 | [
"MIT"
] | 1 | 2021-01-14T01:12:24.000Z | 2021-01-14T01:12:24.000Z | lib/fipe_crawler/supervisor.ex | teofilosalgado/FipeCrawler | 0e25bc10bb23da2b8cb0204fd1db07a07cc7b4c5 | [
"MIT"
] | null | null | null | lib/fipe_crawler/supervisor.ex | teofilosalgado/FipeCrawler | 0e25bc10bb23da2b8cb0204fd1db07a07cc7b4c5 | [
"MIT"
] | null | null | null | defmodule FipeCrawler.Supervisor do
use Supervisor
def start_link(init_arg) do
Supervisor.start_link(__MODULE__, init_arg, name: :supervisor)
end
@impl true
def init(_init_arg) do
children = [
FipeCrawler.Worker.Database,
FipeCrawler.Worker.Marcas,
FipeCrawler.Worker.Modelos,
FipeCrawler.Worker.Anos,
FipeCrawler.Worker.Informacoes
]
Supervisor.init(children, strategy: :one_for_one)
end
end
| 21.619048 | 66 | 0.715859 |
03d20b559b77c0d9841ffa59344abc03e1927caa | 87 | ex | Elixir | lib/elite_investigations_web/views/layout_view.ex | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | null | null | null | lib/elite_investigations_web/views/layout_view.ex | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | 8 | 2019-03-14T16:31:37.000Z | 2019-03-31T16:14:57.000Z | lib/elite_investigations_web/views/layout_view.ex | lee-dohm/elite-investigations | 6511bd1c734bcc7d1e4177b73006891fd1d81855 | [
"MIT"
] | null | null | null | defmodule EliteInvestigationsWeb.LayoutView do
use EliteInvestigationsWeb, :view
end
| 21.75 | 46 | 0.862069 |
03d219655da1ca44d420c6226f9fbd8592a4bdd9 | 229 | exs | Elixir | test/vintage_net_lte/at_parser_test.exs | LostKobrakai/vintage_net_lte | 7c280f38112bf9a5c1785d883302bab73b8000ee | [
"Apache-2.0"
] | null | null | null | test/vintage_net_lte/at_parser_test.exs | LostKobrakai/vintage_net_lte | 7c280f38112bf9a5c1785d883302bab73b8000ee | [
"Apache-2.0"
] | null | null | null | test/vintage_net_lte/at_parser_test.exs | LostKobrakai/vintage_net_lte | 7c280f38112bf9a5c1785d883302bab73b8000ee | [
"Apache-2.0"
] | null | null | null | defmodule VintageNetLTE.ATParserTest do
use ExUnit.Case
alias VintageNetLTE.ATParser
test "can parse the at response for signal quality" do
assert {:csq, {5, 99}} == ATParser.parse_at_response("+CSQ: 5,99")
end
end
| 22.9 | 70 | 0.729258 |
03d29ab55fc6ac1456fa7c0abf389e4b601b2422 | 1,867 | exs | Elixir | clients/testing/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/testing/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/testing/mix.exs | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Testing.Mixfile do
  use Mix.Project

  # Release version, referenced from project/0 below.
  @version "0.21.0"

  def project() do
    [
      app: :google_api_testing,
      version: @version,
      elixir: "~> 1.6",
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/testing"
    ]
  end

  def application() do
    [extra_applications: [:logger]]
  end

  defp deps() do
    [
      {:google_gax, "~> 0.4"},
      # Docs tooling is only needed in the dev environment.
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # Hex package description.
  defp description() do
    """
    Cloud Testing API client library. Allows developers to run automated tests for their mobile applications on Google infrastructure.
    """
  end

  # Hex package metadata: files, maintainers, license and links.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/testing",
        "Homepage" => "https://developers.google.com/cloud-test-lab/"
      }
    ]
  end
end
| 27.865672 | 134 | 0.658275 |
03d2be7ddd039ef617d24e38a0b702dfeeee113d | 20,838 | ex | Elixir | lib/event_store/subscriptions/subscription_fsm.ex | kaikuchn/eventstore | cfe77cb22860526ef3591ba43649ceffc3175259 | [
"MIT"
] | null | null | null | lib/event_store/subscriptions/subscription_fsm.ex | kaikuchn/eventstore | cfe77cb22860526ef3591ba43649ceffc3175259 | [
"MIT"
] | null | null | null | lib/event_store/subscriptions/subscription_fsm.ex | kaikuchn/eventstore | cfe77cb22860526ef3591ba43649ceffc3175259 | [
"MIT"
] | null | null | null | defmodule EventStore.Subscriptions.SubscriptionFsm do
@moduledoc false
alias EventStore.{AdvisoryLocks, RecordedEvent, Registration, Storage}
alias EventStore.Streams.Stream
alias EventStore.Subscriptions.{SubscriptionState, Subscriber}
use Fsm, initial_state: :initial, initial_data: %SubscriptionState{}
require Logger
# Builds the initial FSM data from the subscription options. :conn,
# :event_store, :registry and :serializer are mandatory; all other options
# fall back to a default.
def new(stream_uuid, subscription_name, opts) do
  new(
    data: %SubscriptionState{
      conn: Keyword.fetch!(opts, :conn),
      event_store: Keyword.fetch!(opts, :event_store),
      stream_uuid: stream_uuid,
      subscription_name: subscription_name,
      registry: Keyword.fetch!(opts, :registry),
      serializer: Keyword.fetch!(opts, :serializer),
      start_from: opts[:start_from] || 0,
      mapper: opts[:mapper],
      selector: opts[:selector],
      partition_by: opts[:partition_by],
      buffer_size: opts[:buffer_size] || 1,
      max_size: opts[:max_size] || 1_000
    }
  )
end
# The main flow between states in this finite state machine is:
#
# initial -> request_catch_up -> catching_up -> subscribed
#
# Starting point: create (or look up) the persisted subscription, acquire
# the advisory lock guaranteeing a single active subscription process, and
# register for live event notifications.
defstate initial do
  defevent subscribe,
    data: %SubscriptionState{} = data do
    # Reset all transient delivery state before (re)subscribing.
    data = %SubscriptionState{
      data
      | queue_size: 0,
        partitions: %{},
        processed_event_numbers: MapSet.new()
    }

    with {:ok, subscription} <- create_subscription(data),
         {:ok, lock_ref} <- try_acquire_exclusive_lock(data, subscription),
         :ok <- subscribe_to_events(data) do
      %Storage.Subscription{subscription_id: subscription_id, last_seen: last_seen} =
        subscription

      # A brand new subscription has no last seen position yet.
      last_seen = last_seen || 0

      data = %SubscriptionState{
        data
        | subscription_id: subscription_id,
          lock_ref: lock_ref,
          last_received: last_seen,
          last_sent: last_seen,
          last_ack: last_seen
      }

      notify_subscribed(data)

      next_state(:request_catch_up, data)
    else
      _ ->
        # Failed to subscribe to stream, retry after delay
        next_state(:initial, data)
    end
  end
end
# Waiting to read any events persisted after `last_sent` from storage.
defstate request_catch_up do
  defevent catch_up, data: %SubscriptionState{} = data do
    catch_up_from_stream(data)
  end

  # Acks may arrive while waiting; record them, then continue reading.
  defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
    with {:ok, data} <- ack_events(data, ack, subscriber) do
      catch_up_from_stream(data)
    else
      reply -> respond(reply)
    end
  end
end
# Replaying historical events; progress is driven by subscriber acks.
defstate catching_up do
  defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
    with {:ok, data} <- ack_events(data, ack, subscriber) do
      catch_up_from_stream(data)
    else
      reply -> respond(reply)
    end
  end
end
# Live: up to date with the stream; events arrive via pub/sub notifications.
defstate subscribed do
  # Notify events when subscribed. Compare the first incoming event number
  # against the next expected one to detect duplicates and gaps.
  defevent notify_events(events), data: %SubscriptionState{} = data do
    %SubscriptionState{last_received: last_received} = data

    expected_event = last_received + 1

    case first_event_number(events) do
      past when past < expected_event ->
        Logger.debug(fn -> describe(data) <> " received past event(s), ignoring" end)

        # Ignore already seen events
        next_state(:subscribed, data)

      future when future > expected_event ->
        Logger.debug(fn ->
          describe(data) <> " received unexpected event(s), requesting catch up"
        end)

        # Missed event(s), request catch-up with any unseen events from storage
        next_state(:request_catch_up, data)

      ^expected_event ->
        Logger.debug(fn ->
          describe(data) <> " is enqueueing #{length(events)} event(s)"
        end)

        # Subscriber is up-to-date, so enqueue events to send
        data =
          data
          |> enqueue_events(events)
          |> notify_subscribers()

        if over_capacity?(data) do
          # Too many pending events, must wait for these to be processed.
          next_state(:max_capacity, data)
        else
          # Remain subscribed, waiting for subscriber to ack already sent events.
          next_state(:subscribed, data)
        end
    end
  end

  defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
    with {:ok, data} <- ack_events(data, ack, subscriber) do
      next_state(:subscribed, data)
    else
      reply -> respond(reply)
    end
  end

  defevent catch_up, data: %SubscriptionState{} = data do
    next_state(:request_catch_up, data)
  end
end
# The pending queue is full: stop accepting live notifications until
# subscribers have drained it with acks.
defstate max_capacity do
  defevent ack(ack, subscriber), data: %SubscriptionState{} = data do
    with {:ok, data} <- ack_events(data, ack, subscriber) do
      if empty_queue?(data) do
        # No further pending events so catch up with any unseen.
        next_state(:request_catch_up, data)
      else
        # Pending events remain, wait until subscriber ack's.
        next_state(:max_capacity, data)
      end
    else
      reply -> respond(reply)
    end
  end
end
# The advisory lock was lost; retry subscribing until it is re-acquired.
defstate disconnected do
  # Attempt to subscribe
  defevent subscribe, data: %SubscriptionState{} = data do
    with {:ok, subscription} <- create_subscription(data),
         {:ok, lock_ref} <- try_acquire_exclusive_lock(data, subscription) do
      %Storage.Subscription{
        subscription_id: subscription_id,
        last_seen: last_seen
      } = subscription

      # Resume from the persisted position (0 for a new subscription).
      last_ack = last_seen || 0

      data = %SubscriptionState{
        data
        | subscription_id: subscription_id,
          lock_ref: lock_ref,
          last_sent: last_ack,
          last_ack: last_ack
      }

      next_state(:request_catch_up, data)
    else
      _ ->
        next_state(:disconnected, data)
    end
  end
end
# Terminal state, reached when no subscribers remain or the stream was
# deleted; further unsubscribes are no-ops.
defstate unsubscribed do
  defevent unsubscribe(_subscriber), data: %SubscriptionState{} = data do
    next_state(:unsubscribed, data)
  end
end
# Catch-all event handlers, applicable in any state not handled above.

# Acks arriving in an unexpected state are ignored.
defevent ack(_ack, _subscriber), data: %SubscriptionState{} = data, state: state do
  next_state(state, data)
end

# A new subscriber process connects: monitor it, attempt to distribute
# pending events, and (unless still initialising) confirm the subscription.
defevent connect_subscriber(subscriber, opts),
  data: %SubscriptionState{} = data,
  state: state do
  data = data |> monitor_subscriber(subscriber, opts) |> notify_subscribers()

  unless state == :initial do
    notify_subscribed(subscriber)
  end

  next_state(state, data)
end

# Repeated subscribe requests outside :initial/:disconnected are no-ops.
defevent subscribe, data: %SubscriptionState{} = data, state: state do
  next_state(state, data)
end

# Ignore notify events unless subscribed, but still track the latest event
# number so a later catch-up knows how far behind we are.
defevent notify_events(events), data: %SubscriptionState{} = data, state: state do
  next_state(state, track_last_received(data, events))
end

defevent catch_up, data: %SubscriptionState{} = data, state: state do
  next_state(state, data)
end
# The advisory lock identified by `lock_ref` was lost: drop all queued and
# in-flight delivery state and wait in :disconnected for a re-subscribe.
defevent disconnect(lock_ref), data: %SubscriptionState{lock_ref: lock_ref} = data do
  data =
    %SubscriptionState{
      data
      | lock_ref: nil,
        queue_size: 0,
        partitions: %{},
        processed_event_numbers: MapSet.new()
    }
    |> purge_in_flight_events()

  next_state(:disconnected, data)
end

# A subscriber unsubscribes: requeue its in-flight events for the remaining
# subscribers, or shut the whole subscription down if none remain.
defevent unsubscribe(pid), data: %SubscriptionState{} = data, state: state do
  data = data |> remove_subscriber(pid) |> notify_subscribers()

  case has_subscribers?(data) do
    true ->
      next_state(state, data)

    false ->
      next_state(:unsubscribed, data)
  end
end
# Creates or looks up the persisted subscription row for this stream/name
# pair, starting from the configured position (delegated to storage).
defp create_subscription(%SubscriptionState{} = data) do
  %SubscriptionState{
    conn: conn,
    start_from: start_from,
    stream_uuid: stream_uuid,
    subscription_name: subscription_name
  } = data

  Storage.Subscription.subscribe_to_stream(
    conn,
    stream_uuid,
    subscription_name,
    start_from
  )
end
# Attempts to take the per-subscription advisory lock, ensuring only one
# subscription process is active for this subscription id.
defp try_acquire_exclusive_lock(
       %SubscriptionState{} = data,
       %Storage.Subscription{} = subscription
     ) do
  %Storage.Subscription{subscription_id: subscription_id} = subscription
  %SubscriptionState{event_store: event_store} = data

  # The locks server is registered under <event_store>.AdvisoryLocks.
  server = Module.concat(event_store, AdvisoryLocks)

  AdvisoryLocks.try_advisory_lock(server, subscription_id)
end
# Registers this process for live event notifications on the stream.
defp subscribe_to_events(%SubscriptionState{} = data) do
  %SubscriptionState{event_store: event_store, registry: registry, stream_uuid: stream_uuid} =
    data

  Registration.subscribe(event_store, registry, stream_uuid)
end
# Monitors the subscriber process and records it; a per-subscriber
# :buffer_size option overrides the subscription-wide default.
defp monitor_subscriber(%SubscriptionState{} = data, pid, opts) when is_pid(pid) do
  %SubscriptionState{subscribers: subscribers, buffer_size: buffer_size} = data

  subscriber = %Subscriber{
    pid: pid,
    ref: Process.monitor(pid),
    buffer_size: Keyword.get(opts, :buffer_size, buffer_size)
  }

  %SubscriptionState{data | subscribers: Map.put(subscribers, pid, subscriber)}
end
# Drops `subscriber_pid` from the subscriber map, pushing its in-flight
# events back onto the front of their queues (highest event number first so
# they end up at the head in ascending order) for redelivery.
defp remove_subscriber(%SubscriptionState{subscribers: subscribers} = data, subscriber_pid)
     when is_pid(subscriber_pid) do
  case subscriber_by_pid(subscribers, subscriber_pid) do
    {:ok, %Subscriber{} = subscriber} ->
      %Subscriber{in_flight: in_flight} = subscriber

      # Prepend in-flight events for the removed subscriber to the pending
      # event queue so they can be sent to another available subscriber.
      data =
        in_flight
        |> Enum.sort_by(fn %RecordedEvent{event_number: event_number} -> -event_number end)
        |> Enum.reduce(data, fn event, data ->
          enqueue_event(data, event, &:queue.in_r/2)
        end)

      %SubscriptionState{data | subscribers: Map.delete(subscribers, subscriber_pid)}

    {:error, :unknown_subscriber} ->
      data
  end
end
# True while at least one subscriber process remains connected.
defp has_subscribers?(%SubscriptionState{subscribers: subscribers}),
  do: map_size(subscribers) > 0
# Notify all connected subscribers that this subscription has successfully subscribed.
defp notify_subscribed(%SubscriptionState{subscribers: subscribers}) do
  for {pid, _subscriber} <- subscribers do
    notify_subscribed(pid)
  end

  :ok
end

# Single-subscriber variant: sends the confirmation message directly.
defp notify_subscribed(subscriber) when is_pid(subscriber) do
  send(subscriber, {:subscribed, self()})
end
# Monotonically advances `last_received` to the highest event number seen,
# either from a list of events (uses the last) or a bare number.
defp track_last_received(%SubscriptionState{} = data, events) when is_list(events) do
  track_last_received(data, last_event_number(events))
end

defp track_last_received(%SubscriptionState{} = data, event_number)
     when is_number(event_number) do
  %SubscriptionState{last_received: last_received} = data

  %SubscriptionState{data | last_received: max(last_received, event_number)}
end

# Monotonically advances `last_sent`.
defp track_last_sent(%SubscriptionState{} = data, event_number) do
  %SubscriptionState{last_sent: last_sent} = data

  %SubscriptionState{data | last_sent: max(last_sent, event_number)}
end
# Event number of the first event in a non-empty list.
defp first_event_number([%RecordedEvent{event_number: event_number} | _]), do: event_number

# Event number of the last event, found by walking the non-empty list.
defp last_event_number([%RecordedEvent{event_number: event_number}]), do: event_number
defp last_event_number([_event | events]), do: last_event_number(events)
# With an empty queue, read the next batch of persisted events and decide
# the next state from the result; with events still queued, remain catching
# up until subscribers have acked them.
def catch_up_from_stream(%SubscriptionState{queue_size: 0} = data) do
  %SubscriptionState{last_sent: last_sent, last_received: last_received} = data

  case read_stream_forward(data) do
    {:ok, []} ->
      if last_sent == last_received do
        # Subscriber is up-to-date with latest published events
        next_state(:subscribed, data)
      else
        # Need to catch-up with events published while catching up
        next_state(:request_catch_up, data)
      end

    {:ok, events} ->
      data = data |> enqueue_events(events) |> notify_subscribers()

      if empty_queue?(data) do
        # Request next batch of events
        next_state(:request_catch_up, data)
      else
        # Wait until subscribers have ack'd in-flight events
        next_state(:catching_up, data)
      end

    {:error, :stream_deleted} ->
      # Don't allow subscriptions to deleted streams to receive any events
      next_state(:unsubscribed, data)

    {:error, :stream_not_found} ->
      # Allow subscriptions to streams which don't yet exist, but might be created later
      next_state(:subscribed, data)
  end
end

def catch_up_from_stream(%SubscriptionState{} = data) do
  next_state(:catching_up, data)
end
defp read_stream_forward(%SubscriptionState{} = data) do
%SubscriptionState{
conn: conn,
serializer: serializer,
stream_uuid: stream_uuid,
last_sent: last_sent,
max_size: max_size
} = data
Stream.read_stream_forward(conn, stream_uuid, last_sent + 1, max_size, serializer: serializer)
end
defp enqueue_events(%SubscriptionState{} = data, []), do: data
defp enqueue_events(%SubscriptionState{} = data, [event | events]) do
%SubscriptionState{processed_event_numbers: processed_event_numbers} = data
%RecordedEvent{event_number: event_number} = event
data =
case selected?(event, data) do
true ->
# Unfiltered event, enqueue to send to a subscriber
enqueue_event(data, event)
false ->
# Filtered event, don't send to subscriber, but track it as processed.
%SubscriptionState{
data
| processed_event_numbers: MapSet.put(processed_event_numbers, event_number)
}
|> track_last_sent(event_number)
end
data
|> track_last_received(event_number)
|> enqueue_events(events)
end
defp enqueue_event(%SubscriptionState{} = data, event, enqueue \\ &:queue.in/2) do
%SubscriptionState{partitions: partitions, queue_size: queue_size} = data
partition_key = partition_key(data, event)
partitions =
partitions
|> Map.put_new(partition_key, :queue.new())
|> Map.update!(partition_key, fn pending_events -> enqueue.(event, pending_events) end)
%SubscriptionState{data | partitions: partitions, queue_size: queue_size + 1}
end
def partition_key(%SubscriptionState{partition_by: nil}, %RecordedEvent{}), do: nil
def partition_key(%SubscriptionState{partition_by: partition_by}, %RecordedEvent{} = event)
when is_function(partition_by, 1),
do: partition_by.(event)
# Attempt to notify subscribers with any pending events. Partitions are
# selected by peeking at the event number of their queue to ensure earlier
# events are sent first.
defp notify_subscribers(%SubscriptionState{partitions: partitions} = data) do
partitions
|> Enum.sort_by(fn {_partition_key, pending_events} -> peek_event_number(pending_events) end)
|> Enum.reduce(data, fn {partition_key, _pending_events}, data ->
notify_partition_subscriber(data, partition_key)
end)
|> checkpoint_last_seen()
end
defp peek_event_number(pending_events) do
case :queue.peek(pending_events) do
{:value, %RecordedEvent{event_number: event_number}} -> event_number
_ -> nil
end
end
  # Drain one partition's queue, handing events to available subscribers.
  #
  # Each iteration dequeues a single event, selects the next available
  # subscriber for the partition, records the event as in-flight for that
  # subscriber, then recurses with the event prepended to `events_to_send`.
  # When the queue is exhausted or no subscriber is available, the
  # accumulated events are delivered in one pass.
  defp notify_partition_subscriber(data, partition_key, events_to_send \\ []) do
    %SubscriptionState{
      partitions: partitions,
      subscribers: subscribers,
      queue_size: queue_size
    } = data

    with pending_events when not is_nil(pending_events) <- Map.get(partitions, partition_key),
         {{:value, event}, pending_events} <- :queue.out(pending_events),
         {:ok, subscriber} <- next_available_subscriber(data, partition_key) do
      %RecordedEvent{event_number: event_number} = event
      %Subscriber{pid: subscriber_pid} = subscriber

      # Mark the event as in-flight so it is tracked until acknowledged.
      subscriber = Subscriber.track_in_flight(subscriber, event, partition_key)

      # Remove the partition entirely once its queue has been emptied.
      partitions =
        case :queue.is_empty(pending_events) do
          true -> Map.delete(partitions, partition_key)
          false -> Map.put(partitions, partition_key, pending_events)
        end

      %SubscriptionState{
        data
        | partitions: partitions,
          subscribers: Map.put(subscribers, subscriber_pid, subscriber),
          queue_size: max(queue_size - 1, 0)
      }
      |> track_last_sent(event_number)
      |> notify_partition_subscriber(partition_key, [{subscriber_pid, event} | events_to_send])
    else
      _ ->
        # No further queued event or available subscriber, send ready events to
        # subscribers then stop notifying.
        send_queued_events(events_to_send, data)
    end
  end
# Send events to the subscriber
defp send_queued_events([], data), do: data
defp send_queued_events(events_to_send, data) do
events_to_send
|> Enum.group_by(fn {pid, _event} -> pid end, fn {_pid, event} -> event end)
|> Enum.each(fn {pid, events} ->
mapped_events = events |> Enum.reverse() |> map(data)
send(pid, {:events, mapped_events})
end)
data
end
# Select the next available subscriber based upon their partition key, buffer
# size and number of currently in-flight events.
#
# Uses a round robin strategy for balancing events between subscribers.
#
# Events will be distributed to subscribers based upon their partition key
# when a `partition_by/1` function is provided. This is used to guarantee
# ordering of events for each partition.
defp next_available_subscriber(%SubscriptionState{} = data, partition_key) do
%SubscriptionState{subscribers: subscribers} = data
partition_subscriber =
Enum.find(subscribers, fn {_pid, subscriber} ->
Subscriber.in_partition?(subscriber, partition_key)
end)
subscribers =
case partition_subscriber do
nil -> subscribers
subscriber -> [subscriber]
end
subscribers
|> Enum.sort_by(fn {_pid, %Subscriber{last_sent: last_sent}} -> last_sent end)
|> Enum.find(fn {_pid, subscriber} -> Subscriber.available?(subscriber) end)
|> case do
nil -> {:error, :no_available_subscriber}
{_pid, subscriber} -> {:ok, subscriber}
end
end
defp selected?(event, %SubscriptionState{selector: selector}) when is_function(selector, 1),
do: selector.(event)
defp selected?(_event, %SubscriptionState{}), do: true
defp map(events, %SubscriptionState{mapper: mapper}) when is_function(mapper, 1),
do: Enum.map(events, mapper)
defp map(events, _mapper), do: events
  # Acknowledge receipt of in-flight events by a subscriber.
  #
  # Marks the acknowledged event numbers as processed and attempts to deliver
  # further queued events. Returns `{:ok, data}` on success; if the pid lookup
  # or `Subscriber.acknowledge/2` fails, that step's return value falls
  # through the `with` and is returned as-is.
  defp ack_events(%SubscriptionState{} = data, ack, subscriber_pid) do
    %SubscriptionState{subscribers: subscribers, processed_event_numbers: processed_event_numbers} =
      data

    with {:ok, subscriber} <- subscriber_by_pid(subscribers, subscriber_pid),
         {:ok, subscriber, acknowledged_events} <- Subscriber.acknowledge(subscriber, ack) do
      # Record every acknowledged event number so checkpointing can advance.
      processed_event_numbers =
        acknowledged_events
        |> Enum.map(& &1.event_number)
        |> Enum.reduce(processed_event_numbers, &MapSet.put(&2, &1))

      data =
        %SubscriptionState{
          data
          | subscribers: Map.put(subscribers, subscriber_pid, subscriber),
            processed_event_numbers: processed_event_numbers
        }
        |> notify_subscribers()

      {:ok, data}
    end
  end
defp subscriber_by_pid(subscribers, subscriber_pid) do
case Map.get(subscribers, subscriber_pid) do
%Subscriber{} = subscriber -> {:ok, subscriber}
nil -> {:error, :unknown_subscriber}
end
end
  # Advance the subscription checkpoint past contiguously processed events.
  #
  # Walks forward from `last_ack` while each successive event number is in
  # `processed_event_numbers`, removing it from the set as it goes. Once a
  # gap is reached, the highest contiguous ack is persisted to storage — but
  # only if at least one event was consumed on this walk (`persist` is set to
  # `true` by the recursive call).
  defp checkpoint_last_seen(%SubscriptionState{} = data, persist \\ false) do
    %SubscriptionState{
      conn: conn,
      stream_uuid: stream_uuid,
      subscription_name: subscription_name,
      processed_event_numbers: processed_event_numbers,
      last_ack: last_ack
    } = data

    # Candidate: the next event number after the current checkpoint.
    ack = last_ack + 1

    cond do
      MapSet.member?(processed_event_numbers, ack) ->
        %SubscriptionState{
          data
          | processed_event_numbers: MapSet.delete(processed_event_numbers, ack),
            last_ack: ack
        }
        |> checkpoint_last_seen(true)

      persist ->
        # Persist the new checkpoint; the storage result is not checked here.
        Storage.Subscription.ack_last_seen_event(conn, stream_uuid, subscription_name, last_ack)

        data

      true ->
        data
    end
  end
# Purge all subscriber in-flight events and subscription event queue.
defp purge_in_flight_events(%SubscriptionState{} = data) do
%SubscriptionState{subscribers: subscribers} = data
subscribers =
Enum.reduce(subscribers, %{}, fn {pid, subscriber}, acc ->
Map.put(acc, pid, Subscriber.reset_in_flight(subscriber))
end)
%SubscriptionState{data | subscribers: subscribers}
end
defp empty_queue?(%SubscriptionState{queue_size: 0}), do: true
defp empty_queue?(%SubscriptionState{}), do: false
defp over_capacity?(%SubscriptionState{queue_size: queue_size, max_size: max_size}),
do: queue_size >= max_size
defp describe(%SubscriptionState{stream_uuid: stream_uuid, subscription_name: name}),
do: "Subscription #{inspect(name)}@#{inspect(stream_uuid)}"
end
| 31.765244 | 100 | 0.66897 |
03d2df89848cfe739c97cdf158a3618ffda7928f | 300 | exs | Elixir | farmbot_core/test/test_helper.exs | EarthEngineering/facetop-os | c82a7f1e8098d3a03dddbd2f2cb46cda7b88b6fb | [
"MIT"
] | 1 | 2021-04-22T10:18:50.000Z | 2021-04-22T10:18:50.000Z | farmbot_core/test/test_helper.exs | bluewaysw/farmbot_os | 3449864bc5c17a688ec2fe75e4a5cf247da57806 | [
"MIT"
] | null | null | null | farmbot_core/test/test_helper.exs | bluewaysw/farmbot_os | 3449864bc5c17a688ec2fe75e4a5cf247da57806 | [
"MIT"
] | null | null | null | Application.ensure_all_started(:mimic)
tz = System.get_env("TZ") || Timex.local().time_zone
FarmbotCore.Asset.Device.changeset(FarmbotCore.Asset.device(), %{timezone: tz})
|> FarmbotCore.Asset.Repo.insert_or_update!()
Mimic.copy(FarmbotCeleryScript.SysCalls.Stubs)
Mimic.copy(Timex)
ExUnit.start()
| 30 | 79 | 0.783333 |
03d2e5c0827c1f956f28b5711f5d09d38b1720f3 | 755 | exs | Elixir | basics/5-lists-and-maps/simple_examples_test.exs | mrice/elixir-examples | bca08b7378f5cc39e31467767484c54553d8fbe7 | [
"MIT"
] | null | null | null | basics/5-lists-and-maps/simple_examples_test.exs | mrice/elixir-examples | bca08b7378f5cc39e31467767484c54553d8fbe7 | [
"MIT"
] | null | null | null | basics/5-lists-and-maps/simple_examples_test.exs | mrice/elixir-examples | bca08b7378f5cc39e31467767484c54553d8fbe7 | [
"MIT"
] | null | null | null | Code.load_file("simple_examples.exs")
ExUnit.start
defmodule ListMapExamplesTest do
use ExUnit.Case
test "test add two lists" do
# trivial list addition
assert ListMapExamples.add_kw_list([{:a, 1}], [{:b, 2}]) == [{:a, 1}, {:b, 2}]
end
test "demo keyword list as argument parameter" do
# order matters a lot, but sometimes nice to have named parameters (matching makes this work, btw)
assert ListMapExamples.say_hello first_name: "michael", last_name: "rice" == "Hello, michael rice"
end
test "demo map version of say_hello" do
data = %{:lname => "rice", :fname => "michael"} # it's a map, so order doesn't matter like it does with lists
assert ListMapExamples.say_hello(data) == "Hello, michael rice"
end
end
| 30.2 | 113 | 0.690066 |
03d2eb9c9729aa5149f05a74698a132beaa5670d | 2,234 | exs | Elixir | apps/fz_http/test/fz_http_web/live/account_live/show_test.exs | jasonboukheir/firezone | 79d610b94f67ae25c8ca26f391c0edf288f6aaa5 | [
"Apache-2.0"
] | null | null | null | apps/fz_http/test/fz_http_web/live/account_live/show_test.exs | jasonboukheir/firezone | 79d610b94f67ae25c8ca26f391c0edf288f6aaa5 | [
"Apache-2.0"
] | null | null | null | apps/fz_http/test/fz_http_web/live/account_live/show_test.exs | jasonboukheir/firezone | 79d610b94f67ae25c8ca26f391c0edf288f6aaa5 | [
"Apache-2.0"
] | null | null | null | defmodule FzHttpWeb.AccountLive.ShowTest do
use FzHttpWeb.ConnCase, async: true
alias FzHttp.{Users, Users.User}
alias FzHttpWeb.AccountLive.FormComponent
describe "when unauthenticated" do
test "mount redirects to session path", %{unauthed_conn: conn} do
path = Routes.account_show_path(conn, :show)
expected_path = Routes.session_path(conn, :new)
assert {:error, {:redirect, %{to: ^expected_path}}} = live(conn, path)
end
end
describe "when live_action is show" do
test "shows account details", %{authed_conn: conn} do
path = Routes.account_show_path(conn, :show)
{:ok, _view, html} = live(conn, path)
user = Users.get_user!(get_session(conn, :user_id))
assert html =~ "Delete your account"
assert html =~ user.email
end
end
describe "when live_action is edit" do
@valid_params %{"user" => %{"email" => "foobar@test"}}
@invalid_params %{"user" => %{"email" => "foobar"}}
test "loads the form" do
assert render_component(FormComponent, id: :test, user: %User{}) =~
"Change email or enter new password below"
end
test "saves email when submitted", %{authed_conn: conn} do
path = Routes.account_show_path(conn, :edit)
{:ok, view, _html} = live(conn, path)
view
|> element("#account-edit")
|> render_submit(@valid_params)
flash = assert_redirected(view, Routes.account_show_path(conn, :show))
assert flash["info"] == "Account updated successfully."
end
test "renders validation errors", %{authed_conn: conn} do
path = Routes.account_show_path(conn, :edit)
{:ok, view, _html} = live(conn, path)
test_view =
view
|> element("#account-edit")
|> render_submit(@invalid_params)
assert test_view =~ "has invalid format"
end
test "closes modal", %{authed_conn: conn} do
path = Routes.account_show_path(conn, :edit)
{:ok, view, _html} = live(conn, path)
view
|> element("button.delete")
|> render_click()
# Sometimes assert_patched fails without this :-(
Process.sleep(100)
assert_patched(view, Routes.account_show_path(conn, :show))
end
end
end
| 29.786667 | 76 | 0.640555 |
03d2f770236fc02a067f95aa37827fe374f0ade8 | 723 | ex | Elixir | lib/router/user_playlist.ex | fimars/netease_music_api | 821f19782f482a092b10e3cc39c93f6c131e9075 | [
"MIT"
] | 3 | 2017-12-25T02:40:05.000Z | 2019-05-09T04:01:24.000Z | lib/router/user_playlist.ex | fimars/netease_music_api | 821f19782f482a092b10e3cc39c93f6c131e9075 | [
"MIT"
] | null | null | null | lib/router/user_playlist.ex | fimars/netease_music_api | 821f19782f482a092b10e3cc39c93f6c131e9075 | [
"MIT"
] | null | null | null | defmodule Router.User.Playlist do
@moduledoc """
用户播放列表
**Path:** `/user/playlist`
**Query:**
- `uid`: 用户id
**Example**
- `/user/playlist?uid=350652322`
"""
import Plug.Conn
import Helpers.Util
def init(options), do: options
def call(conn, _opts) do
body =
conn
|> get_req_header("cookie")
|> dispatch(conn.query_params)
|> Map.get(:body)
conn
|> send_resp(200, body)
end
def dispatch(cookie, %{"uid" => uid}) do
data = %{
"uid" => uid,
"offset" => 0,
"limit" => 1000,
"csrf_token" => ''
}
createWebRequest(
:post,
"music.163.com",
"/weapi/user/playlist",
data,
cookie
)
end
end
| 15.717391 | 42 | 0.532503 |
03d328e116cf59c2bf1f59dcd9fdc99d245e4cb1 | 652 | ex | Elixir | lib/faktory_tutorial/application.ex | nathanbegbie/learn-elixir-faktory | 5a77b494bb6d61373a1b195119b69dd47a2404fc | [
"MIT"
] | null | null | null | lib/faktory_tutorial/application.ex | nathanbegbie/learn-elixir-faktory | 5a77b494bb6d61373a1b195119b69dd47a2404fc | [
"MIT"
] | null | null | null | lib/faktory_tutorial/application.ex | nathanbegbie/learn-elixir-faktory | 5a77b494bb6d61373a1b195119b69dd47a2404fc | [
"MIT"
] | null | null | null | defmodule FaktoryTutorial.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@impl true
def start(_type, _args) do
children = [
# Starts a worker by calling: FaktoryTutorial.Worker.start_link(arg)
# {FaktoryTutorial.Worker, arg}
FaktoryTutorial.FaktoryClient,
FaktoryTutorial.FaktoryWorker,
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: FaktoryTutorial.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 28.347826 | 74 | 0.726994 |
03d35927689244c9738bf19b93452a5a1c152c11 | 722 | exs | Elixir | mix.exs | talentdeficit/ssv | 77d5253952981e89d3d5d0ccd48c9c56fa683a0b | [
"MIT"
] | null | null | null | mix.exs | talentdeficit/ssv | 77d5253952981e89d3d5d0ccd48c9c56fa683a0b | [
"MIT"
] | null | null | null | mix.exs | talentdeficit/ssv | 77d5253952981e89d3d5d0ccd48c9c56fa683a0b | [
"MIT"
] | null | null | null | defmodule SSV.Mixfile do
use Mix.Project
def project do
[
app: :ssv,
version: "0.1.0",
description: "parse comma separated values. or tab separated values. or any kind of separated values",
deps: deps(Mix.env),
package: package,
language: :erlang,
erlc_options: opts(Mix.env)
]
end
defp opts(:dev), do: [d: :TEST] ++ opts(:prod)
defp opts(_), do: []
defp deps(_), do: [{:mixunit, "~> 0.9.2", only: :dev}]
defp package do
[
files: [
"LICENSE",
"mix.exs",
"src"
],
contributors: ["alisdair sullivan"],
links: %{"github" => "https://github.com/talentdeficit/ssv"},
licenses: ["MIT"]
]
end
end
| 21.235294 | 108 | 0.548476 |
03d365fb4dd278a71314dd5b925a46ca4e4b2bec | 1,060 | exs | Elixir | mix.exs | MikaAK/elixir_error_message | da17e91e8272435ad06152ffa32802e15f2d497a | [
"MIT"
] | 9 | 2021-11-05T06:19:26.000Z | 2022-03-14T22:27:54.000Z | mix.exs | MikaAK/elixir_error_message | da17e91e8272435ad06152ffa32802e15f2d497a | [
"MIT"
] | null | null | null | mix.exs | MikaAK/elixir_error_message | da17e91e8272435ad06152ffa32802e15f2d497a | [
"MIT"
] | null | null | null | defmodule ErrorMessage.MixProject do
use Mix.Project
def project do
[
app: :error_message,
version: "0.1.4",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: "Error system to help make errors consistent across your system",
docs: docs(),
package: package(),
preferred_cli_env: [dialyzer: :test],
dialyzer: [plt_add_apps: [:jason]]
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev},
{:jason, ">= 1.0.0", optional: true},
{:dialyxir, "~> 1.0", only: :test, runtime: false}
]
end
defp package do
[
maintainers: ["Mika Kalathil"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/MikaAK/elixir_error_message"},
files: ~w(mix.exs README.md CHANGELOG.md lib)
]
end
defp docs do
[
main: "ErrorMessage",
source_url: "https://github.com/MikaAK/elixir_error_message"
]
end
end
| 21.632653 | 84 | 0.572642 |
03d371713542038ed0d39e6e554668b134e00d5b | 7,207 | exs | Elixir | lib/elixir/test/elixir/io_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 2 | 2020-06-02T18:00:28.000Z | 2021-12-10T03:21:42.000Z | lib/elixir/test/elixir/io_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-09-14T16:23:33.000Z | 2021-03-25T17:38:59.000Z | lib/elixir/test/elixir/io_test.exs | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2018-01-09T20:10:59.000Z | 2018-01-09T20:10:59.000Z | Code.require_file("test_helper.exs", __DIR__)
defmodule IOTest do
use ExUnit.Case, async: true
doctest IO
import ExUnit.CaptureIO
test "read with count" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist])
assert 'FOO' == IO.read(file, 3)
assert File.close(file) == :ok
end
test "read with UTF-8 and binary" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский" == IO.read(file, 7)
assert File.close(file) == :ok
end
test "read all charlist" do
{:ok, file} = File.open(Path.expand('fixtures/multiline_file.txt', __DIR__), [:charlist])
assert 'this is the first line\nthis is the second line\n' == IO.read(file, :all)
assert File.close(file) == :ok
end
test "read empty file" do
{:ok, file} = File.open(Path.expand('fixtures/cp_mode', __DIR__), [])
assert IO.read(file, :all) == ""
assert File.close(file) == :ok
{:ok, file} = File.open(Path.expand('fixtures/cp_mode', __DIR__), [:charlist])
assert IO.read(file, :all) == ''
assert File.close(file) == :ok
end
test "binread" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
assert "Русский" == IO.binread(file, 14)
assert File.close(file) == :ok
end
test "binread all" do
{:ok, file} = File.open(Path.expand('fixtures/file.bin', __DIR__))
assert "LF\nCR\rCRLF\r\nLFCR\n\r" == IO.binread(file, :all)
assert File.close(file) == :ok
end
test "getn" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__))
assert "F" == IO.getn(file, "")
assert "O" == IO.getn(file, "")
assert "O" == IO.getn(file, "")
assert "\n" == IO.getn(file, "")
assert :eof == IO.getn(file, "")
assert File.close(file) == :ok
end
test "getn with count" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist])
assert 'F' == IO.getn(file, "λ")
assert 'OO' == IO.getn(file, "", 2)
assert '\n' == IO.getn(file, "λ", 99)
assert File.close(file) == :ok
end
test "getn with UTF-8 and binary" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский" == IO.getn(file, "", 7)
assert File.close(file) == :ok
end
test "gets" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__), [:charlist])
assert 'FOO\n' == IO.gets(file, "")
assert :eof == IO.gets(file, "")
assert File.close(file) == :ok
end
test "gets with UTF-8 and binary" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский\n" == IO.gets(file, "")
assert "日\n" == IO.gets(file, "")
assert File.close(file) == :ok
end
test "readall" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__))
assert "FOO\n" == IO.read(file, :all)
assert "" == IO.read(file, :all)
assert File.close(file) == :ok
end
test "readall with UTF-8 and binary" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский\n日\n" == IO.read(file, :all)
assert "" == IO.read(file, :all)
assert File.close(file) == :ok
end
test "readline" do
{:ok, file} = File.open(Path.expand('fixtures/file.txt', __DIR__))
assert "FOO\n" == IO.read(file, :line)
assert :eof == IO.read(file, :line)
assert File.close(file) == :ok
end
test "readline with UTF-8 and binary" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__), [:utf8])
assert "Русский\n" == IO.read(file, :line)
assert "日\n" == IO.read(file, :line)
assert File.close(file) == :ok
end
test "binreadall" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
assert "Русский\n日\n" == IO.binread(file, :all)
assert "" == IO.binread(file, :all)
assert File.close(file) == :ok
end
test "binreadline" do
{:ok, file} = File.open(Path.expand('fixtures/utf8.txt', __DIR__))
assert "Русский\n" == IO.binread(file, :line)
assert "日\n" == IO.binread(file, :line)
assert File.close(file) == :ok
end
test "puts with chardata" do
assert capture_io(fn -> IO.puts("hello") end) == "hello\n"
assert capture_io(fn -> IO.puts('hello') end) == "hello\n"
assert capture_io(fn -> IO.puts(:hello) end) == "hello\n"
assert capture_io(fn -> IO.puts(13) end) == "13\n"
end
test "warn with chardata" do
assert capture_io(:stderr, fn -> IO.warn("hello") end) =~
"hello\n (ex_unit #{System.version()}) lib/ex_unit"
assert capture_io(:stderr, fn -> IO.warn('hello') end) =~
"hello\n (ex_unit #{System.version()}) lib/ex_unit"
assert capture_io(:stderr, fn -> IO.warn(:hello) end) =~
"hello\n (ex_unit #{System.version()}) lib/ex_unit"
assert capture_io(:stderr, fn -> IO.warn(13) end) =~
"13\n (ex_unit #{System.version()}) lib/ex_unit"
assert capture_io(:stderr, fn -> IO.warn("hello", []) end) =~ "hello\n"
stacktrace = [{IEx.Evaluator, :eval, 4, [file: 'lib/iex/evaluator.ex', line: 108]}]
assert capture_io(:stderr, fn -> IO.warn("hello", stacktrace) end) =~ """
hello
lib/iex/evaluator.ex:108: IEx.Evaluator.eval/4
"""
end
test "write with chardata" do
assert capture_io(fn -> IO.write("hello") end) == "hello"
assert capture_io(fn -> IO.write('hello') end) == "hello"
assert capture_io(fn -> IO.write(:hello) end) == "hello"
assert capture_io(fn -> IO.write(13) end) == "13"
end
test "gets with chardata" do
assert capture_io("foo\n", fn -> IO.gets("hello") end) == "hello"
assert capture_io("foo\n", fn -> IO.gets('hello') end) == "hello"
assert capture_io("foo\n", fn -> IO.gets(:hello) end) == "hello"
assert capture_io("foo\n", fn -> IO.gets(13) end) == "13"
end
test "getn with chardata" do
assert capture_io("foo\n", fn -> IO.getn("hello", 3) end) == "hello"
assert capture_io("foo\n", fn -> IO.getn('hello', 3) end) == "hello"
assert capture_io("foo\n", fn -> IO.getn(:hello, 3) end) == "hello"
assert capture_io("foo\n", fn -> IO.getn(13, 3) end) == "13"
end
test "getn with different arities" do
assert capture_io("hello", fn ->
input = IO.getn(">")
IO.write(input)
end) == ">h"
assert capture_io("hello", fn ->
input = IO.getn(">", 3)
IO.write(input)
end) == ">hel"
assert capture_io("hello", fn ->
input = IO.getn(Process.group_leader(), ">")
IO.write(input)
end) == ">h"
assert capture_io("hello", fn ->
input = IO.getn(Process.group_leader(), ">")
IO.write(input)
end) == ">h"
assert capture_io("hello", fn ->
input = IO.getn(Process.group_leader(), ">", 99)
IO.write(input)
end) == ">hello"
end
test "inspect" do
assert capture_io(fn -> IO.inspect(1) end) == "1\n"
assert capture_io(fn -> IO.inspect(1, label: "foo") end) == "foo: 1\n"
assert capture_io(fn -> IO.inspect(1, label: :foo) end) == "foo: 1\n"
end
end
| 33.67757 | 93 | 0.585819 |
03d3c51654ceccca87cc37fee92ead47182a6f77 | 1,687 | ex | Elixir | lib/ecto/schema/metadata.ex | tcrossland/ecto | e028a90920fed27865075787d33c2ad61f45fd24 | [
"Apache-2.0"
] | 2 | 2021-02-25T15:51:16.000Z | 2021-02-25T18:42:35.000Z | lib/ecto/schema/metadata.ex | tcrossland/ecto | e028a90920fed27865075787d33c2ad61f45fd24 | [
"Apache-2.0"
] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | lib/ecto/schema/metadata.ex | tcrossland/ecto | e028a90920fed27865075787d33c2ad61f45fd24 | [
"Apache-2.0"
] | 1 | 2018-06-18T14:47:58.000Z | 2018-06-18T14:47:58.000Z | defmodule Ecto.Schema.Metadata do
@moduledoc """
Stores metadata of a struct.
## State
The state of the schema is stored in the `:state` field and allows
following values:
* `:built` - the struct was constructed in memory and is not persisted
to database yet;
* `:loaded` - the struct was loaded from database and represents
persisted data;
* `:deleted` - the struct was deleted and no longer represents persisted
data.
## Source
The `:source` tracks the (table or collection) where the struct is or should
be persisted to.
## Prefix
Tracks the source prefix in the data storage.
## Context
The `:context` field represents additional state some databases require
for proper updates of data. It is not used by the built-in adapters of
`Ecto.Adapters.Postgres` and `Ecto.Adapters.MySQL`.
## Schema
The `:schema` field refers the module name for the schema this metadata belongs to.
"""
defstruct [:state, :source, :context, :schema, :prefix]
@type state :: :built | :loaded | :deleted
@type context :: any
@type t :: %__MODULE__{
context: context,
prefix: Ecto.Schema.prefix(),
schema: module,
source: Ecto.Schema.source(),
state: state
}
defimpl Inspect do
import Inspect.Algebra
def inspect(metadata, opts) do
%{source: source, prefix: prefix, state: state, context: context} = metadata
entries =
for entry <- [state, prefix, source, context],
entry != nil,
do: to_doc(entry, opts)
concat(["#Ecto.Schema.Metadata<"] ++ Enum.intersperse(entries, ", ") ++ [">"])
end
end
end
| 25.953846 | 85 | 0.643154 |
03d3e0a3d98d905f4dfb01e08c4bd7ab0b97b592 | 1,130 | ex | Elixir | test/support/factory.ex | ubudget/octopus | 84087fbb6bfd5e7611583d7c1098e18d4239f036 | [
"MIT"
] | null | null | null | test/support/factory.ex | ubudget/octopus | 84087fbb6bfd5e7611583d7c1098e18d4239f036 | [
"MIT"
] | null | null | null | test/support/factory.ex | ubudget/octopus | 84087fbb6bfd5e7611583d7c1098e18d4239f036 | [
"MIT"
] | null | null | null | defmodule Octopus.Factory do
@moduledoc """
Defines factories for test fixtures.
"""
use ExMachina.Ecto, repo: Octopus.Repo
alias Octopus.Accounts.{Request, Session, User}
alias OctopusWeb.Endpoint
alias Phoenix.Token
require Logger
def user_factory do
%User{
name: "John Q. Test",
email: sequence(:email, &"jqt.#{&1}@email.com")
}
end
defp request_salt, do: Application.fetch_env!(:octopus, :request_salt)
defp session_salt, do: Application.fetch_env!(:octopus, :session_salt)
def request_factory do
%Request{
user: build(:user),
ip: "127.0.0.1",
secure_hash: sequence("secret"),
token: ""
}
end
def session_factory do
%Session{
user: build(:user),
ip: "127.0.0.1",
secure_hash: sequence("secret"),
token: ""
}
end
def request_with_token(user) do
%{build(:request, user: user) | token: Token.sign(Endpoint, request_salt(), user.id)}
|> insert
end
def session_with_token(user) do
%{build(:session, user: user) | token: Token.sign(Endpoint, session_salt(), user.id)}
|> insert
end
end
| 23.061224 | 89 | 0.641593 |
03d429dc1075d55ea259c2accac84da4bc513aec | 1,172 | ex | Elixir | Microsoft.Azure.Management.Preview.BotService/lib/microsoft/azure/management/preview/bot_service/model/connection_setting_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Preview.BotService/lib/microsoft/azure/management/preview/bot_service/model/connection_setting_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Preview.BotService/lib/microsoft/azure/management/preview/bot_service/model/connection_setting_properties.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Preview.BotService.Model.ConnectionSettingProperties do
@moduledoc """
Properties for a Connection Setting Item
"""
@derive [Poison.Encoder]
defstruct [
:"clientId",
:"settingId",
:"clientSecret",
:"scopes",
:"serviceProviderId",
:"serviceProviderDisplayName",
:"parameters"
]
@type t :: %__MODULE__{
:"clientId" => String.t,
:"settingId" => String.t,
:"clientSecret" => String.t,
:"scopes" => String.t,
:"serviceProviderId" => String.t,
:"serviceProviderDisplayName" => String.t,
:"parameters" => [ConnectionSettingParameter]
}
end
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Preview.BotService.Model.ConnectionSettingProperties do
import Microsoft.Azure.Management.Preview.BotService.Deserializer
def decode(value, options) do
value
|> deserialize(:"parameters", :list, Microsoft.Azure.Management.Preview.BotService.Model.ConnectionSettingParameter, options)
end
end
| 29.3 | 129 | 0.71587 |
03d449fbb1914c46a11db543eff4eb96f0ab65d2 | 286 | ex | Elixir | apps/ewallet_api/lib/ewallet_api/v1/serializers/json/auth_token_serializer.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_api/lib/ewallet_api/v1/serializers/json/auth_token_serializer.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_api/lib/ewallet_api/v1/serializers/json/auth_token_serializer.ex | turbo-play/ewallet | b7fee3eed62ac716f46246132c2ead1045f2e4f3 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletAPI.V1.JSON.AuthTokenSerializer do
@moduledoc """
Serializes auth token data into V1 JSON response format.
"""
use EWalletAPI.V1
def serialize(auth_token) do
%{
object: "authentication_token",
authentication_token: auth_token
}
end
end
| 20.428571 | 58 | 0.70979 |
03d47da79c54612c3c3f87a5bb813726fb229b19 | 267 | ex | Elixir | test/fixtures/elixir/put_with_file.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 536 | 2021-10-06T17:21:25.000Z | 2022-03-31T13:05:48.000Z | test/fixtures/elixir/put_with_file.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 74 | 2021-10-08T13:57:14.000Z | 2022-03-31T06:55:39.000Z | test/fixtures/elixir/put_with_file.ex | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | [
"MIT"
] | 104 | 2021-10-06T19:36:15.000Z | 2022-03-31T07:34:04.000Z | request = %HTTPoison.Request{
method: :put,
url: "http://localhost:28139/upload",
options: [],
headers: [
{~s|Content-Type|, ~s|application/x-www-form-urlencoded|},
],
params: [],
body: {:file, ~s|new_file|}
}
response = HTTPoison.request(request)
| 20.538462 | 62 | 0.632959 |
03d49d21273a8a28900920b4e60e43e3156a2905 | 1,586 | exs | Elixir | mix.exs | Malian/surface_catalogue | 5629a496db506cdb108e0b089e4d9445efa7825f | [
"MIT"
] | null | null | null | mix.exs | Malian/surface_catalogue | 5629a496db506cdb108e0b089e4d9445efa7825f | [
"MIT"
] | null | null | null | mix.exs | Malian/surface_catalogue | 5629a496db506cdb108e0b089e4d9445efa7825f | [
"MIT"
] | null | null | null | defmodule Surface.Catalogue.MixProject do
use Mix.Project
@version "0.0.7"
def project do
[
app: :surface_catalogue,
version: @version,
elixir: "~> 1.8",
description: "An initial prototype of the Surface Catalogue",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
deps: deps(),
docs: docs(),
aliases: aliases(),
xref: [exclude: Surface.Catalogue.Playground],
package: package()
]
end
def application do
[
mod: {Surface.Catalogue.Application, []},
extra_applications: [:logger]
]
end
def catalogues do
[
"deps/surface/priv/catalogue"
]
end
defp elixirc_paths(:dev), do: ["lib"] ++ catalogues()
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp aliases do
[
setup: ["deps.get", "cmd npm install --prefix assets"],
dev: "run --no-halt dev.exs"
]
end
defp deps do
[
{:jason, "~> 1.0"},
{:html_entities, "~> 0.4"},
{:plug_cowboy, "~> 2.0"},
{:phoenix_live_reload, "~> 1.2"},
{:surface, "~> 0.3.0"},
{:ex_doc, ">= 0.19.0", only: :docs},
{:makeup_elixir, "~> 0.15.1"}
]
end
defp docs do
[
source_ref: "v#{@version}",
source_url: "https://github.com/surface-ui/surface_catalogue"
]
end
defp package do
%{
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/surface-ui/surface_catalogue"}
}
end
end
| 21.726027 | 77 | 0.56116 |
03d4a51562c457a8810090f8a7d2e232e971538c | 759 | ex | Elixir | test/protobuf/protoc/proto_gen/extension.pb.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 4 | 2021-01-16T02:21:44.000Z | 2022-03-04T18:42:18.000Z | test/protobuf/protoc/proto_gen/extension.pb.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 5 | 2020-04-07T20:22:38.000Z | 2020-09-23T02:28:36.000Z | test/protobuf/protoc/proto_gen/extension.pb.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 4 | 2020-07-22T23:38:34.000Z | 2021-03-26T18:52:54.000Z | defmodule Protobuf.Protoc.ExtTest.Foo do
@moduledoc false
use Protobuf, custom_field_options?: true, syntax: :proto2
@type t :: %__MODULE__{
a: String.t()
}
defstruct [:a]
def full_name do
"ext.Foo"
end
field :a, 1, optional: true, type: :string
end
defmodule Protobuf.Protoc.ExtTest.Dual do
@moduledoc false
use Protobuf, custom_field_options?: true, syntax: :proto2
@type t :: %__MODULE__{
a: String.t() | nil,
b: Google.Protobuf.StringValue.t() | nil
}
defstruct [:a, :b]
def full_name do
"ext.Dual"
end
field :a, 1, optional: true, type: Google.Protobuf.StringValue, options: [extype: "String.t"]
field :b, 2, optional: true, type: Google.Protobuf.StringValue
end
| 22.323529 | 95 | 0.646904 |
03d4aa9992d39cb4a33faab54761a9288d7fcd6c | 13,962 | exs | Elixir | apps/tai/test/tai/venues/asset_balance_store_test.exs | CalebOts/tai | 1603bfefa390eac89085ba18fd259b2e9fcac6c0 | [
"MIT"
] | 1 | 2019-12-19T05:16:26.000Z | 2019-12-19T05:16:26.000Z | apps/tai/test/tai/venues/asset_balance_store_test.exs | CalebOts/tai | 1603bfefa390eac89085ba18fd259b2e9fcac6c0 | [
"MIT"
] | null | null | null | apps/tai/test/tai/venues/asset_balance_store_test.exs | CalebOts/tai | 1603bfefa390eac89085ba18fd259b2e9fcac6c0 | [
"MIT"
] | null | null | null | defmodule Tai.Venues.AssetBalanceStoreTest do
use ExUnit.Case
doctest Tai.Venues.AssetBalanceStore
alias Tai.Venues.AssetBalanceStore
setup do
on_exit(fn ->
Application.stop(:tai)
end)
{:ok, _} = Application.ensure_all_started(:tai)
:ok
end
describe ".upsert" do
test "inserts the balance into the ETS table" do
balance =
struct(Tai.Venues.AssetBalance, %{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: :btc
})
assert AssetBalanceStore.upsert(balance) == :ok
assert [{{:my_test_exchange, :my_test_account, :btc}, ^balance}] =
:ets.lookup(
AssetBalanceStore,
{:my_test_exchange, :my_test_account, :btc}
)
end
test "broadcasts an event" do
Tai.Events.firehose_subscribe()
balance = %Tai.Venues.AssetBalance{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: :btc,
free: Decimal.new("0.00000001"),
locked: Decimal.new(2)
}
:ok = AssetBalanceStore.upsert(balance)
assert_receive {Tai.Event, %Tai.Events.UpsertAssetBalance{} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.asset == :btc
assert event.free == Decimal.new("0.00000001")
assert event.locked == Decimal.new(2)
end
end
describe ".all" do
test "returns a list of balances" do
assert AssetBalanceStore.all() == []
balance = %Tai.Venues.AssetBalance{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: :btc,
free: Decimal.new("1.1"),
locked: Decimal.new("2.1")
}
:ok = AssetBalanceStore.upsert(balance)
assert [^balance] = AssetBalanceStore.all()
end
end
describe ".where" do
test "returns a list of the matching balances" do
balance_1 =
struct(Tai.Venues.AssetBalance, %{
venue_id: :my_test_exchange,
account_id: :my_test_account_a,
asset: :btc,
free: Decimal.new("1.1")
})
balance_2 =
struct(Tai.Venues.AssetBalance, %{
venue_id: :my_test_exchange,
account_id: :my_test_account_b,
asset: :btc,
free: Decimal.new("2.1")
})
:ok = AssetBalanceStore.upsert(balance_1)
:ok = AssetBalanceStore.upsert(balance_2)
assert [^balance_1, ^balance_2] =
AssetBalanceStore.where(
venue_id: :my_test_exchange,
asset: :btc
)
|> Enum.sort(&(Decimal.cmp(&1.free, &2.free) == :lt))
assert [^balance_1] =
AssetBalanceStore.where(
venue_id: :my_test_exchange,
account_id: :my_test_account_a
)
end
end
describe ".find_by" do
test "returns an ok tuple with the balance" do
balance =
struct(Tai.Venues.AssetBalance, %{
venue_id: :my_test_exchange,
account_id: :my_test_account_a,
asset: :btc
})
:ok = AssetBalanceStore.upsert(balance)
assert {:ok, ^balance} =
AssetBalanceStore.find_by(
venue_id: :my_test_exchange,
account_id: :my_test_account_a
)
end
test "returns an error tuple when not found" do
assert {:error, :not_found} =
AssetBalanceStore.find_by(
venue_id: :my_test_exchange,
account_id: :my_test_account_a
)
end
end
describe ".lock" do
setup [:init_asset_balance]
defp lock(asset, min, max) do
AssetBalanceStore.lock(%AssetBalanceStore.LockRequest{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: asset,
min: min |> Decimal.cast(),
max: max |> Decimal.cast()
})
end
test "returns max when = free balance" do
assert lock(:btc, 0, 2.1) == {:ok, Decimal.new("1.1")}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("0.0")
assert balance.locked == Decimal.new("3.2")
end
test "returns max when < free balance" do
assert lock(:btc, 0, 1.0) == {:ok, Decimal.new("1.0")}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("0.1")
assert balance.locked == Decimal.new("3.1")
end
test "returns free balance when max >= free balance and min = free balance" do
assert lock(:btc, 1.1, 2.2) == {:ok, Decimal.new("1.1")}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("0.0")
assert balance.locked == Decimal.new("3.2")
end
test "returns free balance when max >= free balance and min < free balance" do
assert lock(:btc, 1.0, 2.2) == {:ok, Decimal.new("1.1")}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("0.0")
assert balance.locked == Decimal.new("3.2")
end
test "returns an error tuple when the asset doesn't exist" do
assert lock(:xbt, 0.1, 2.2) == {:error, :not_found}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "returns an error tuple when min > free balance" do
assert lock(:btc, 1.11, 2.2) == {:error, {:insufficient_balance, Decimal.new("1.1")}}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "returns an error tuple when min > max" do
assert lock(:btc, 0.11, 0.1) == {:error, :min_greater_than_max}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "returns an error tuple when min < 0" do
assert lock(:btc, -0.1, 0.1) == {:error, :min_less_than_zero}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "broadcasts an event when successful" do
Tai.Events.firehose_subscribe()
lock(:btc, 0.5, 0.6)
assert_receive {Tai.Event, %Tai.Events.LockAssetBalanceOk{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.qty == Decimal.new("0.6")
assert event.min == Decimal.new("0.5")
assert event.max == Decimal.new("0.6")
end
test "broadcasts an event when unsuccessful" do
Tai.Events.firehose_subscribe()
lock(:btc, 1.2, 1.3)
assert_receive {Tai.Event,
%Tai.Events.LockAssetBalanceInsufficientFunds{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.min == Decimal.new("1.2")
assert event.max == Decimal.new("1.3")
assert event.free == Decimal.new("1.1")
end
end
describe ".unlock" do
setup [:init_asset_balance]
defp unlock(asset, qty) do
AssetBalanceStore.unlock(%AssetBalanceStore.UnlockRequest{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: asset,
qty: qty |> Decimal.cast()
})
end
test "unlocks the balance for the asset" do
assert unlock(:btc, 1.0) == :ok
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("2.1")
assert balance.locked == Decimal.new("1.1")
end
test "doesn't unlock the balance if the asset doesn't exist" do
assert unlock(:xbt, 1.0) == {:error, :not_found}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "doesn't unlock the quantity when there is an insufficient locked balance" do
assert unlock(:btc, 2.11) == {:error, {:insufficient_balance, Decimal.new("2.1")}}
assert [balance] = AssetBalanceStore.all()
assert balance.free == Decimal.new("1.1")
assert balance.locked == Decimal.new("2.1")
end
test "broadcasts an event when successful" do
Tai.Events.firehose_subscribe()
unlock(:btc, 1.0)
assert_receive {Tai.Event, %Tai.Events.UnlockAssetBalanceOk{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.qty == Decimal.new("1.0")
end
test "broadcasts an event when unsuccessful" do
Tai.Events.firehose_subscribe()
unlock(:btc, 2.11)
assert_receive {Tai.Event,
%Tai.Events.UnlockAssetBalanceInsufficientFunds{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.locked == Decimal.new("2.1")
assert event.qty == Decimal.new("2.11")
end
end
describe ".add" do
setup [:init_asset_balance]
test "adds to free and returns an ok tuple with the new balance" do
assert {:ok, balance} =
AssetBalanceStore.add(
:my_test_exchange,
:my_test_account,
:btc,
Decimal.new("0.1")
)
assert balance.free == Decimal.new("1.2")
assert balance.locked == Decimal.new("2.1")
assert {:ok, balance} =
AssetBalanceStore.add(:my_test_exchange, :my_test_account, :btc, 0.1)
assert balance.free == Decimal.new("1.3")
assert balance.locked == Decimal.new("2.1")
assert {:ok, balance} =
AssetBalanceStore.add(
:my_test_exchange,
:my_test_account,
:btc,
"0.1"
)
assert balance.free == Decimal.new("1.4")
assert balance.locked == Decimal.new("2.1")
end
test "broadcasts an event with the updated balances" do
Tai.Events.firehose_subscribe()
AssetBalanceStore.add(:my_test_exchange, :my_test_account, :btc, 0.1)
assert_receive {Tai.Event, %Tai.Events.AddFreeAssetBalance{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.val == Decimal.new("0.1")
assert event.free == Decimal.new("1.2")
assert event.locked == Decimal.new("2.1")
end
test "returns an error tuple when the asset doesn't exist" do
assert AssetBalanceStore.add(:my_test_exchange, :my_test_account, :eth, 0.1) ==
{:error, :not_found}
end
test "returns an error tuple when the value is not positive" do
assert AssetBalanceStore.add(:my_test_exchange, :my_test_account, :btc, 0) ==
{:error, :value_must_be_positive}
assert AssetBalanceStore.add(:my_test_exchange, :my_test_account, :btc, -0.1) ==
{:error, :value_must_be_positive}
end
end
describe ".sub" do
setup [:init_asset_balance]
test "subtracts from free and returns an ok tuple with the new balance" do
assert {:ok, balance} =
AssetBalanceStore.sub(
:my_test_exchange,
:my_test_account,
:btc,
Decimal.new("0.1")
)
assert balance.free == Decimal.new("1.0")
assert balance.locked == Decimal.new("2.1")
assert {:ok, balance} =
AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, 0.1)
assert balance.free == Decimal.new("0.9")
assert balance.locked == Decimal.new("2.1")
assert {:ok, balance} =
AssetBalanceStore.sub(
:my_test_exchange,
:my_test_account,
:btc,
"0.1"
)
assert balance.free == Decimal.new("0.8")
assert balance.locked == Decimal.new("2.1")
end
test "broadcasts an event with the updated balances" do
Tai.Events.firehose_subscribe()
AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, 0.1)
assert_receive {Tai.Event, %Tai.Events.SubFreeAssetBalance{asset: :btc} = event, _}
assert event.venue_id == :my_test_exchange
assert event.account_id == :my_test_account
assert event.val == Decimal.new("0.1")
assert event.free == Decimal.new("1.0")
assert event.locked == Decimal.new("2.1")
end
test "returns an error tuple when the result is less than 0" do
assert {:ok, _balance} =
AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, 1.1)
assert AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, 1.1) ==
{:error, :result_less_then_zero}
end
test "returns an error tuple when the asset doesn't exist" do
assert AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :eth, 0.1) ==
{:error, :not_found}
end
test "returns an error tuple when the value is not positive" do
assert AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, 0) ==
{:error, :value_must_be_positive}
assert AssetBalanceStore.sub(:my_test_exchange, :my_test_account, :btc, -0.1) ==
{:error, :value_must_be_positive}
end
end
@free Decimal.new("1.1")
@locked Decimal.new("2.1")
defp init_asset_balance(_context) do
balance = %Tai.Venues.AssetBalance{
venue_id: :my_test_exchange,
account_id: :my_test_account,
asset: :btc,
free: @free,
locked: @locked
}
:ok = AssetBalanceStore.upsert(balance)
{:ok, %{balance: balance}}
end
end
| 31.165179 | 94 | 0.60414 |
03d4b2ac9d49582927f99924b8b617e835ad69c6 | 2,090 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/language.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.Language do
@moduledoc """
Contains information about a language that can be targeted by ads.
## Attributes
* `id` (*type:* `String.t`, *default:* `nil`) - Language ID of this language. This is the ID used for targeting and generating reports.
* `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#language".
* `languageCode` (*type:* `String.t`, *default:* `nil`) - Format of language code is an ISO 639 two-letter language code optionally followed by an underscore followed by an ISO 3166 code. Examples are "en" for English or "zh_CN" for Simplified Chinese.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of this language.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => String.t(),
:kind => String.t(),
:languageCode => String.t(),
:name => String.t()
}
field(:id)
field(:kind)
field(:languageCode)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.Language do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.Language.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.Language do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.321429 | 256 | 0.706699 |
03d4b4872030c1990ce6aa7164631831500854fd | 2,364 | exs | Elixir | elixir/test/link_leaks_test.exs | samuelventura/go-sdk | 7209256f70d39d142a2a9859061ac7ec6c19b2bc | [
"Unlicense"
] | null | null | null | elixir/test/link_leaks_test.exs | samuelventura/go-sdk | 7209256f70d39d142a2a9859061ac7ec6c19b2bc | [
"Unlicense"
] | null | null | null | elixir/test/link_leaks_test.exs | samuelventura/go-sdk | 7209256f70d39d142a2a9859061ac7ec6c19b2bc | [
"Unlicense"
] | null | null | null | defmodule LinkLeaksTest do
use ExUnit.Case
test "Process leaks on normal process exit" do
self = self()
pid =
spawn(fn ->
lid =
spawn_link(fn ->
receive do
any -> any
end
end)
send(self, lid)
end)
ref = :erlang.monitor(:process, pid)
assert_receive lid, 400
assert_receive {:DOWN, ^ref, :process, ^pid, :normal}, 400
assert true == Process.alive?(lid)
# garbage collection wont help
# :erlang.garbage_collect(pid)
# :timer.sleep(200)
# assert false == Process.alive?(lid)
end
test "Process leak on GenServer stop call" do
self = self()
{:ok, pid} =
GenServer.start(Server, fn ->
lid =
spawn_link(fn ->
receive do
any -> any
end
end)
send(self, lid)
{:ok, lid}
end)
assert_receive lid, 400
ref = :erlang.monitor(:process, pid)
GenServer.stop(pid)
assert_receive {:DOWN, ^ref, :process, ^pid, :normal}, 400
assert true == Process.alive?(lid)
end
test "Process leaks on GenServer normal stop" do
self = self()
{:error, :normal} =
GenServer.start(Server, fn ->
lid =
spawn_link(fn ->
receive do
any -> any
end
end)
send(self, {self(), lid})
{:stop, :normal}
end)
assert_receive {pid, lid}, 400
ref = :erlang.monitor(:process, pid)
assert_receive {:DOWN, ^ref, :process, ^pid, :noproc}, 400
assert true == Process.alive?(lid)
end
test "Agent leaks on normal process exit" do
self = self()
pid =
spawn(fn ->
{:ok, lid} = Agent.start_link(fn -> 0 end)
send(self, lid)
end)
ref = :erlang.monitor(:process, pid)
assert_receive lid, 400
assert_receive {:DOWN, ^ref, :process, ^pid, :normal}, 400
assert true == Process.alive?(lid)
end
test "Supervisor leaks on normal process exit" do
self = self()
pid =
spawn(fn ->
{:ok, lid} = Supervisor.start_link([], strategy: :one_for_one)
send(self, lid)
end)
ref = :erlang.monitor(:process, pid)
assert_receive lid, 400
assert_receive {:DOWN, ^ref, :process, ^pid, :normal}, 400
assert true == Process.alive?(lid)
end
end
| 22.730769 | 70 | 0.550338 |
03d4bd32886c5edf33b24705654ff995deecc5ea | 567 | ex | Elixir | inmana/lib/inmana/supplies/get_by_expiration.ex | cesaraugustomt/Inmana_API | c288f98d7dc1f3cbe8693914e38769c8dcc3a8d6 | [
"MIT"
] | 1 | 2021-05-18T00:04:36.000Z | 2021-05-18T00:04:36.000Z | inmana/lib/inmana/supplies/get_by_expiration.ex | cesaraugustomt/Inmana_API | c288f98d7dc1f3cbe8693914e38769c8dcc3a8d6 | [
"MIT"
] | null | null | null | inmana/lib/inmana/supplies/get_by_expiration.ex | cesaraugustomt/Inmana_API | c288f98d7dc1f3cbe8693914e38769c8dcc3a8d6 | [
"MIT"
] | null | null | null | defmodule Inmana.Supplies.GetByExpiration do
import Ecto.Query
alias Inmana.{Repo, Restaurant, Supply}
def call do
today = Date.utc_today()
beginning_of_week = Date.beginning_of_week(today)
end_of_week = Date.end_of_week(today)
query =
from(supply in Supply,
where:
supply.expiration_date >= ^beginning_of_week and supply.expiration_date <= ^end_of_week,
preload: [:restaurant]
)
query
|> Repo.all()
|> Enum.group_by(fn %Supply{restaurant: %Restaurant{email: email}} -> email end)
end
end
| 24.652174 | 98 | 0.671958 |
03d503b89c8fea4c8d26a3cf27586bde0f71a805 | 640 | ex | Elixir | lib/onigumo.ex | Glutexo/onigumo | 065492677f68ba0f0dfe30bb0ce5e8d55b708b2b | [
"MIT"
] | 3 | 2021-10-19T12:09:30.000Z | 2022-01-30T13:24:57.000Z | lib/onigumo.ex | Glutexo/onigumo | 065492677f68ba0f0dfe30bb0ce5e8d55b708b2b | [
"MIT"
] | 67 | 2020-02-26T10:11:45.000Z | 2022-03-30T11:06:55.000Z | lib/onigumo.ex | Glutexo/onigumo | 065492677f68ba0f0dfe30bb0ce5e8d55b708b2b | [
"MIT"
] | 1 | 2021-11-13T13:32:34.000Z | 2021-11-13T13:32:34.000Z | defmodule Onigumo do
@moduledoc """
Web scraper
"""
@input_filename "urls.txt"
@output_filename "body.html"
def main() do
HTTPoison.start()
http = http_client()
load_urls(@input_filename)
|> Enum.map(&download(http, &1))
end
def download(http_client, url) do
%HTTPoison.Response{
status_code: 200,
body: body
} = http_client.get!(url)
File.write!(@output_filename, body)
end
def load_urls(filepath) do
File.stream!(filepath, [:read], :line)
|> Enum.map(&String.trim_trailing/1)
end
defp http_client() do
Application.get_env(:onigumo, :http_client)
end
end
| 18.823529 | 47 | 0.65 |
03d5193d607a215ae789876bc6df877bccf34f9b | 1,220 | exs | Elixir | mix.exs | tmbb/makeup_html5 | 747b51547ca5e54ca93b3fe0707cd8223082566f | [
"BSD-2-Clause"
] | null | null | null | mix.exs | tmbb/makeup_html5 | 747b51547ca5e54ca93b3fe0707cd8223082566f | [
"BSD-2-Clause"
] | null | null | null | mix.exs | tmbb/makeup_html5 | 747b51547ca5e54ca93b3fe0707cd8223082566f | [
"BSD-2-Clause"
] | 1 | 2020-04-20T09:20:36.000Z | 2020-04-20T09:20:36.000Z | defmodule Makeup.Languages.HTML5.Mixfile do
use Mix.Project
def project do
[
app: :makeup_html5,
version: "0.2.0",
elixir: "~> 1.0",
start_permanent: Mix.env == :prod,
deps: deps(),
# Docs
name: "Makeup - HTML5 Support",
source_url: "https://github.com/tmbb/makeup_html5",
homepage_url: "https://github.com/tmbb/makeup_html5",
docs: [main: "Makeup.Lexers.HTML5Lexer", # The main page in the docs
extras: ["README.md"]],
# Package
package: package(),
description: description()
]
end
defp description do
"""
HTML5 lexer for the Makeup syntax highlighting library
"""
end
defp package do
[
name: :makeup_html5,
licenses: ["BSD"],
maintainers: ["Tiago Barroso <tmbb@campus.ul.pt>"],
links: %{"GitHub" => "https://github.com/tmbb/makeup_html5"}
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:makeup, "~> 0.2.0"},
{:ex_doc, "~> 0.16.3", only: :dev, runtime: false}
]
end
end
| 23.018868 | 74 | 0.585246 |
03d5b8405e527595e91e8561c3fcd807971f4d0e | 11,569 | ex | Elixir | kousa/lib/kousa/room.ex | BronzW/dogehouse | 6cebfd22af728112a861a9e2ea6202cd848abab6 | [
"MIT"
] | null | null | null | kousa/lib/kousa/room.ex | BronzW/dogehouse | 6cebfd22af728112a861a9e2ea6202cd848abab6 | [
"MIT"
] | null | null | null | kousa/lib/kousa/room.ex | BronzW/dogehouse | 6cebfd22af728112a861a9e2ea6202cd848abab6 | [
"MIT"
] | null | null | null | defmodule Kousa.Room do
alias Kousa.Utils.VoiceServerUtils
alias Beef.Users
alias Beef.Follows
alias Beef.Rooms
# note the following 2 module aliases are on the chopping block!
alias Beef.RoomPermissions
alias Beef.RoomBlocks
def set_auto_speaker(user_id, value) do
if room = Rooms.get_room_by_creator_id(user_id) do
Onion.RoomSession.set_auto_speaker(room.id, value)
end
end
@spec make_room_public(any, any) :: nil | :ok
def make_room_public(user_id, new_name) do
# this needs to be refactored if a user can have multiple rooms
case Beef.Rooms.set_room_privacy_by_creator_id(user_id, false, new_name) do
{1, [room]} ->
Onion.RoomSession.broadcast_ws(
room.id,
%{op: "room_privacy_change", d: %{roomId: room.id, name: room.name, isPrivate: false}}
)
_ ->
nil
end
end
@spec make_room_private(any, any) :: nil | :ok
def make_room_private(user_id, new_name) do
# this needs to be refactored if a user can have multiple rooms
case Rooms.set_room_privacy_by_creator_id(user_id, true, new_name) do
{1, [room]} ->
Onion.RoomSession.broadcast_ws(
room.id,
%{op: "room_privacy_change", d: %{roomId: room.id, name: room.name, isPrivate: true}}
)
_ ->
nil
end
end
def invite_to_room(user_id, user_id_to_invite) do
user = Beef.Users.get_by_id(user_id)
if user.currentRoomId && Follows.following_me?(user_id, user_id_to_invite) do
# @todo store room name in RoomSession to avoid db lookups
room = Rooms.get_room_by_id(user.currentRoomId)
if not is_nil(room) do
Onion.RoomSession.create_invite(
user.currentRoomId,
user_id_to_invite,
%{
roomName: room.name,
displayName: user.displayName,
username: user.username,
avatarUrl: user.avatarUrl,
type: "invite"
}
)
end
end
end
defp internal_kick_from_room(user_id_to_kick, room_id) do
current_room_id = Beef.Users.get_current_room_id(user_id_to_kick)
if current_room_id == room_id do
Rooms.kick_from_room(user_id_to_kick, current_room_id)
Onion.RoomSession.kick_from_room(current_room_id, user_id_to_kick)
end
end
def block_from_room(user_id, user_id_to_block_from_room) do
with {status, room} when status in [:creator, :mod] <-
Rooms.get_room_status(user_id) do
if room.creatorId != user_id_to_block_from_room do
RoomBlocks.insert(%{
modId: user_id,
userId: user_id_to_block_from_room,
roomId: room.id
})
internal_kick_from_room(user_id_to_block_from_room, room.id)
end
end
end
defp internal_set_listener(user_id_to_make_listener, room_id) do
RoomPermissions.make_listener(user_id_to_make_listener, room_id)
Onion.RoomSession.remove_speaker(room_id, user_id_to_make_listener)
end
def set_listener(user_id, user_id_to_set_listener) do
if user_id == user_id_to_set_listener do
internal_set_listener(
user_id_to_set_listener,
Beef.Users.get_current_room_id(user_id_to_set_listener)
)
else
{status, room} = Rooms.get_room_status(user_id)
is_creator = user_id_to_set_listener == not is_nil(room) and room.creatorId
if not is_creator and (status == :creator or status == :mod) do
internal_set_listener(
user_id_to_set_listener,
Beef.Users.get_current_room_id(user_id_to_set_listener)
)
end
end
end
@spec internal_set_speaker(any, any) :: nil | :ok | {:err, {:error, :not_found}}
def internal_set_speaker(user_id_to_make_speaker, room_id) do
case RoomPermissions.set_speaker(user_id_to_make_speaker, room_id, true) do
{:ok, _} ->
# kind of horrible to have to make a double genserver call
# here, we'll have to think about how this works (who owns muting)
Onion.RoomSession.add_speaker(
room_id,
user_id_to_make_speaker,
Onion.UserSession.get(user_id_to_make_speaker, :muted)
)
err ->
{:err, err}
end
catch
_, _ ->
{:error, "room not found"}
end
def make_speaker(user_id, user_id_to_make_speaker) do
with {status, room} when status in [:creator, :mod] <-
Rooms.get_room_status(user_id),
true <- RoomPermissions.asked_to_speak?(user_id_to_make_speaker, room.id) do
internal_set_speaker(user_id_to_make_speaker, room.id)
end
end
def change_mod(user_id, user_id_to_change, value) when is_boolean(value) do
if room = Rooms.get_room_by_creator_id(user_id) do
RoomPermissions.set_is_mod(user_id_to_change, room.id, value)
Onion.RoomSession.broadcast_ws(
room.id,
%{
op: "mod_changed",
d: %{roomId: room.id, userId: user_id_to_change}
}
)
end
end
def change_room_creator(old_creator_id, new_creator_id) do
# get current room id
current_room_id = Beef.Users.get_current_room_id(new_creator_id)
is_speaker = Beef.RoomPermissions.speaker?(new_creator_id, current_room_id)
# get old creator's room id for validation
old_creator_room_id = Beef.Users.get_current_room_id(old_creator_id)
if is_speaker and not is_nil(current_room_id) and new_creator_id != old_creator_id and
current_room_id == old_creator_room_id do
case Rooms.replace_room_owner(old_creator_id, new_creator_id) do
{1, _} ->
internal_set_speaker(old_creator_id, current_room_id)
Beef.RoomPermissions
Onion.RoomSession.broadcast_ws(
current_room_id,
%{op: "new_room_creator", d: %{roomId: current_room_id, userId: new_creator_id}}
)
_ ->
nil
end
end
nil
end
def join_vc_room(user_id, room, speaker? \\ nil) do
speaker? =
if is_nil(speaker?),
do:
room.creatorId == user_id or
RoomPermissions.speaker?(user_id, room.id),
else: speaker?
op =
if speaker?,
do: "join-as-speaker",
else: "join-as-new-peer"
Onion.VoiceRabbit.send(room.voiceServerId, %{
op: op,
d: %{roomId: room.id, peerId: user_id},
uid: user_id
})
end
def edit_room(user_id, new_name, new_description, is_private) do
if room = Rooms.get_room_by_creator_id(user_id) do
case Rooms.edit(room.id, %{
name: new_name,
description: new_description,
is_private: is_private
}) do
{:ok, _room} ->
Onion.RoomSession.broadcast_ws(room.id, %{
op: "new_room_details",
d: %{
name: new_name,
description: new_description,
isPrivate: is_private,
roomId: room.id
}
})
{:error, x} ->
{:error, Kousa.Utils.Errors.changeset_to_first_err_message_with_field_name(x)}
end
end
end
@spec create_room(String.t(), String.t(), String.t(), boolean(), String.t() | nil) ::
{:error, any}
| {:ok, %{room: atom | %{:id => any, :voiceServerId => any, optional(any) => any}}}
def create_room(user_id, room_name, room_description, is_private, user_id_to_invite \\ nil) do
room_id = Users.get_current_room_id(user_id)
if not is_nil(room_id) do
leave_room(user_id, room_id)
end
id = Ecto.UUID.generate()
case Rooms.create(%{
id: id,
name: room_name,
description: room_description,
creatorId: user_id,
numPeopleInside: 1,
voiceServerId: VoiceServerUtils.get_next_voice_server_id(),
isPrivate: is_private
}) do
{:ok, room} ->
Onion.RoomSession.start_supervised(
room_id: room.id,
voice_server_id: room.voiceServerId
)
muted? = Onion.UserSession.get(user_id, :muted)
Onion.RoomSession.join_room(room.id, user_id, muted?, no_fan: true)
Onion.VoiceRabbit.send(room.voiceServerId, %{
op: "create-room",
d: %{roomId: id},
uid: user_id
})
join_vc_room(user_id, room, true)
if not is_private do
Kousa.Follow.notify_followers_you_created_a_room(user_id, room)
end
if not is_nil(user_id_to_invite) do
# TODO: change this to Task.Supervised
Task.start(fn ->
Kousa.Room.invite_to_room(user_id, user_id_to_invite)
end)
end
{:ok, %{room: room}}
{:error, x} ->
{:error, Kousa.Utils.Errors.changeset_to_first_err_message_with_field_name(x)}
end
end
# NB this function does not correctly return an updated room struct if the
# action is valid.
# NB2, this function has an non-idiomatic parameter order. room_id should
# come first.
def join_room(user_id, room_id) do
currentRoomId = Beef.Users.get_current_room_id(user_id)
if currentRoomId == room_id do
%{room: Rooms.get_room_by_id(room_id)}
else
case Rooms.can_join_room(room_id, user_id) do
{:error, message} ->
%{error: message}
{:ok, room} ->
private_check =
if room.isPrivate do
case Onion.RoomSession.redeem_invite(room.id, user_id) do
:error ->
{:error, "the room is private, ask someone inside to invite you"}
:ok ->
:ok
end
else
:ok
end
case private_check do
{:error, m} ->
%{error: m}
:ok ->
if currentRoomId do
leave_room(user_id, currentRoomId)
end
updated_user = Rooms.join_room(room, user_id)
muted = Onion.UserSession.get(user_id, :muted)
Onion.RoomSession.join_room(room_id, user_id, muted)
canSpeak =
case updated_user do
%{roomPermissions: %{isSpeaker: true}} -> true
_ -> false
end
join_vc_room(user_id, room, canSpeak || room.isPrivate)
%{room: room}
end
end
end
catch
_, _ -> {:error, "that room doesn't exist"}
end
@doc """
Removes `user_id` from `current_room_id` (or from whatever room the user is
currently in when no room id is given).

Returns `{:ok, %{roomId: id}}` on success or `{:error, reason}` when the
user is not in any room. Side effects depend on what `Rooms.leave_room/2`
reports: the room session is destroyed when the last person leaves, and a
`new_room_creator` broadcast is sent when ownership is transferred.
"""
def leave_room(user_id, current_room_id \\ nil) do
  # Resolve the room to leave when the caller did not supply one.
  current_room_id =
    if is_nil(current_room_id),
      do: Beef.Users.get_current_room_id(user_id),
      else: current_room_id

  if current_room_id do
    # Flattened from a redundant nested `case x do` inside a catch-all
    # clause: each outcome is now matched directly.
    case Rooms.leave_room(user_id, current_room_id) do
      # Last person out: destroy the session and tell the voice server.
      {:bye, room} ->
        Onion.RoomSession.destroy(current_room_id, user_id)

        Onion.VoiceRabbit.send(room.voiceServerId, %{
          op: "destroy-room",
          uid: user_id,
          d: %{peerId: user_id, roomId: current_room_id}
        })

      # Room stays alive and ownership moved to a new creator: broadcast
      # the change, then detach the leaving user from the session.
      {:new_creator_id, creator_id} ->
        Onion.RoomSession.broadcast_ws(
          current_room_id,
          %{op: "new_room_creator", d: %{roomId: current_room_id, userId: creator_id}}
        )

        Onion.RoomSession.leave_room(current_room_id, user_id)

      # Room stays alive with the same creator: just detach the user.
      _ ->
        Onion.RoomSession.leave_room(current_room_id, user_id)
    end

    {:ok, %{roomId: current_room_id}}
  else
    {:error, "you are not in a room"}
  end
end
end
| 29.894057 | 96 | 0.616302 |
03d602861a40907d00c74bd6e1c88a430571ee74 | 2,068 | ex | Elixir | lib/farmbot.ex | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
] | null | null | null | lib/farmbot.ex | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
] | null | null | null | lib/farmbot.ex | KRISHITECH/farmbot_os | 4220baf8f3ef930ce3b861b178ca6a76e2fd7591 | [
"MIT"
defmodule Farmbot do
  @moduledoc """
  Supervises the individual modules that make up the Farmbot Application.
  """
  require Logger
  use Supervisor
  alias Farmbot.Sync.Database
  alias Farmbot.System.Supervisor, as: FBSYS

  # Builds the top-level child list. Children are started in order, so
  # platform specifics (FBSYS) and auth come up before everything that
  # depends on them.
  @spec init(map) :: [{:ok, pid}]
  def init(%{target: target,
             compat_version: compat_version,
             version: version,
             commit: commit})
  do
    children = [
      # system specifics
      supervisor(FBSYS, [target: target], restart: :permanent),
      # auth services
      worker(Farmbot.Auth, [], restart: :permanent),
      # web app
      supervisor(Farmbot.Configurator, [], restart: :permanent),
      # Generic counter.
      worker(Counter, [], restart: :permanent),
      # The worker for diffing db entries.
      worker(Farmbot.Sync.Supervisor, [], restart: :permanent),
      # Handles tracking of various parts of the bots state.
      supervisor(Farmbot.BotState.Supervisor,
        [%{target: target,
           compat_version: compat_version,
           version: version,
           commit: commit}], restart: :permanent),
      # Handles FarmEvents
      supervisor(FarmEvent.Supervisor, [], restart: :permanent),
      # Handles the passing of messages from one part of the system to another.
      supervisor(Farmbot.Transport.Supervisor, [], restart: :permanent),
      # Handles external scripts and what not
      supervisor(Farmware.Supervisor, [], restart: :permanent),
      # handles communications between bot and arduino
      supervisor(Farmbot.Serial.Supervisor, [], restart: :permanent),
      worker(Farmbot.ImageWatcher, [], restart: :permanent)
    ]
    opts = [strategy: :one_for_one]
    supervise(children, opts)
  end

  @doc """
  Entry Point to Farmbot
  """
  @spec start(atom, [any]) :: {:ok, pid}
  def start(type, args)
  def start(_, [args]) do
    Logger.info ">> init!"
    # Bring up the Amnesia (Mnesia) database on this node before the
    # supervision tree starts; wait up to 15s for tables to be ready.
    Amnesia.start
    Database.create! Keyword.put([], :memory, [node()])
    Database.wait(15_000)
    Supervisor.start_link(__MODULE__, args, name: Farmbot.Supervisor)
  end
end
03d6065874164dc923358359909aca5e64cecb88 | 132 | ex | Elixir | lib/ex_clearbit/exception.ex | EnaiaInc/ex_clearbit | 16d0ecc8ffe75c2df70fda703c5c085dbe20bea6 | [
"MIT"
] | 3 | 2017-03-16T20:55:36.000Z | 2018-01-29T04:20:04.000Z | lib/ex_clearbit/exception.ex | EnaiaInc/ex_clearbit | 16d0ecc8ffe75c2df70fda703c5c085dbe20bea6 | [
"MIT"
] | 9 | 2016-12-24T05:02:03.000Z | 2020-03-10T20:37:14.000Z | lib/ex_clearbit/exception.ex | EnaiaInc/ex_clearbit | 16d0ecc8ffe75c2df70fda703c5c085dbe20bea6 | [
"MIT"
defmodule ExClearbit.Error do
  @moduledoc """
  Generic error thrown by the application.
  """
  # `:message` drives the default `Exception.message/1` implementation;
  # `:code` carries the API/HTTP error code when one is available.
  defexception [:code, :message]
end
| 18.857143 | 41 | 0.719697 |
03d60774ce386433c04d393f9a7a15a7ce8e971f | 2,093 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/model/network_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_build/lib/google_api/cloud_build/v1/model/network_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_build/lib/google_api/cloud_build/v1/model/network_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBuild.V1.Model.NetworkConfig do
  @moduledoc """
  Defines the network configuration for the pool.

  ## Attributes

  *   `egressOption` (*type:* `String.t`, *default:* `nil`) - Option to configure network egress for the workers.
  *   `peeredNetwork` (*type:* `String.t`, *default:* `nil`) - Required. Immutable. The network definition that the workers are peered to. If this section is left empty, the workers will be peered to `WorkerPool.project_id` on the service producer network. Must be in the format `projects/{project}/global/networks/{network}`, where `{project}` is a project number, such as `12345`, and `{network}` is the name of a VPC network in the project. See [Understanding network configuration options](https://cloud.google.com/build/docs/private-pools/set-up-private-pool-environment)
  """

  # Gax.ModelBase supplies the struct definition plus JSON decode/encode
  # helpers driven by the `field/1` declarations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :egressOption => String.t() | nil,
          :peeredNetwork => String.t() | nil
        }

  # Each `field/1` registers a JSON key <-> struct key mapping.
  field(:egressOption)
  field(:peeredNetwork)
end
defimpl Poison.Decoder, for: GoogleApi.CloudBuild.V1.Model.NetworkConfig do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.CloudBuild.V1.Model.NetworkConfig.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudBuild.V1.Model.NetworkConfig do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 41.86 | 576 | 0.73913 |
03d60eb8eb5a604375c2908ee2dbe57419092c76 | 34,452 | ex | Elixir | clients/source_repo/lib/google_api/source_repo/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/source_repo/lib/google_api/source_repo/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/source_repo/lib/google_api/source_repo/v1/api/projects.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SourceRepo.V1.Api.Projects do
@moduledoc """
API calls for all endpoints tagged `Projects`.
"""
alias GoogleApi.SourceRepo.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Returns the Cloud Source Repositories configuration of the project.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the requested project. Values are of the form
`projects/<project>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.ProjectConfig{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_get_config(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.ProjectConfig.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_get_config(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}/config", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.ProjectConfig{}])
end
@doc """
Updates the Cloud Source Repositories configuration of the project.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the requested project. Values are of the form
`projects/<project>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.UpdateProjectConfigRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.ProjectConfig{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_update_config(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.ProjectConfig.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_update_config(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request;
  # `:body` carries the UpdateProjectConfigRequest payload.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/{+name}/config", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.ProjectConfig{}])
end
@doc """
Creates a repo in the given project with the given name.
If the named repository already exists, `CreateRepo` returns
`ALREADY_EXISTS`.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `parent` (*type:* `String.t`) - The project in which to create the repo. Values are of the form
`projects/<project>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.Repo.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Repo{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_create(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Repo.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_create(connection, parent, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request;
  # `:body` carries the Repo payload to create.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `parent` is URI-escaped and expanded into the path template.
  path_params = %{"parent" => URI.encode(parent, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+parent}/repos", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Repo{}])
end
@doc """
Deletes a repo.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the repo to delete. Values are of the form
`projects/<project>/repos/<repo>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_delete(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Empty.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_delete(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call; a successful delete decodes to an empty message.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Empty{}])
end
@doc """
Returns information about a repo.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the requested repository. Values are of the form
`projects/<project>/repos/<repo>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Repo{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_get(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Repo.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_get(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Repo{}])
end
@doc """
Gets the access control policy for a resource.
Returns an empty policy if the resource exists and does not have a policy
set.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:"options.requestedPolicyVersion"` (*type:* `integer()`) - Optional. The policy format version to be returned.
Valid values are 0, 1, and 3. Requests specifying an invalid value will be
rejected.
Requests for policies with any conditional bindings must specify version 3.
Policies without any conditional bindings may specify any valid value or
leave the field unset.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_get_iam_policy(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_get_iam_policy(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Placement of each supported option on the outgoing HTTP request,
  # including the nested `options.requestedPolicyVersion` query key.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :"options.requestedPolicyVersion" => :query
  }

  # `resource` is URI-escaped and expanded into the path template.
  path_params = %{"resource" => URI.encode(resource, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+resource}:getIamPolicy", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Policy{}])
end
@doc """
Returns all repos belonging to a project. The sizes of the repos are
not set by ListRepos. To get the size of a repo, use GetRepo.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The project ID whose repos should be listed. Values are of the form
`projects/<project>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Maximum number of repositories to return; between 1 and 500.
If not set or zero, defaults to 100 at the server.
* `:pageToken` (*type:* `String.t`) - Resume listing repositories where a prior ListReposResponse
left off. This is an opaque token that must be obtained from
a recent, prior ListReposResponse's next_page_token field.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.ListReposResponse{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.ListReposResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_list(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request;
  # `:pageSize`/`:pageToken` drive server-side pagination.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :pageSize => :query,
    :pageToken => :query
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/v1/{+name}/repos", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.ListReposResponse{}])
end
@doc """
Updates information about a repo.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `name` (*type:* `String.t`) - The name of the requested repository. Values are of the form
`projects/<project>/repos/<repo>`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.UpdateRepoRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Repo{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_patch(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Repo.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_patch(connection, name, optional_params \\ [], opts \\ []) do
  # Placement of each supported option on the outgoing HTTP request;
  # `:body` carries the UpdateRepoRequest payload.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `name` is URI-escaped and expanded into the path template.
  path_params = %{"name" => URI.encode(name, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:patch)
    |> Request.url("/v1/{+name}", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Repo{}])
end
@doc """
Sets the access control policy on the specified resource. Replaces any
existing policy.
## Parameters
* `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
* `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.SetIamPolicyRequest.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.SourceRepo.V1.Model.Policy{}}` on success
* `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_set_iam_policy(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.SourceRepo.V1.Model.Policy.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_set_iam_policy(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Placement of each supported option on the outgoing HTTP request;
  # `:body` carries the SetIamPolicyRequest payload.
  query_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  # `resource` is URI-escaped and expanded into the path template.
  path_params = %{"resource" => URI.encode(resource, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/v1/{+resource}:setIamPolicy", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into the model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Policy{}])
end
@doc """
Synchronize a connected repo.

The response contains SyncRepoMetadata in the metadata field.

## Parameters

*   `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
*   `name` (*type:* `String.t`) - The name of the repo to synchronize. Values are of the form
    `projects/<project>/repos/<repo>`.
*   `optional_params` (*type:* `keyword()`) - Optional parameters
    *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    *   `:access_token` (*type:* `String.t`) - OAuth access token.
    *   `:alt` (*type:* `String.t`) - Data format for response.
    *   `:callback` (*type:* `String.t`) - JSONP
    *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    *   `:key` (*type:* `String.t`) - API key. Required unless you provide an OAuth 2.0 token.
    *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    *   `:quotaUser` (*type:* `String.t`) - Arbitrary string (max 40 chars) used for quota purposes.
    *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    *   `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.SyncRepoRequest.t`) -
*   `opts` (*type:* `keyword()`) - Call options

## Returns

*   `{:ok, %GoogleApi.SourceRepo.V1.Model.Operation{}}` on success
*   `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_sync(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
        {:ok, GoogleApi.SourceRepo.V1.Model.Operation.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_sync(connection, name, optional_params \\ [], opts \\ []) do
  # Maps each supported option to where it belongs in the HTTP request.
  optional_params_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  request =
    Request.new()
    |> Request.method(:post)
    # `{+name}` is expanded with the URL-escaped repo name.
    |> Request.url("/v1/{+name}:sync", %{
      "name" => URI.encode(name, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.Operation{}])
end
@doc """
Returns permissions that a caller has on the specified resource.

If the resource does not exist, this will return an empty set of
permissions, not a NOT_FOUND error.

## Parameters

*   `connection` (*type:* `GoogleApi.SourceRepo.V1.Connection.t`) - Connection to server
*   `resource` (*type:* `String.t`) - REQUIRED: The resource for which the policy detail is
    being requested. See the operation documentation for the appropriate value for this field.
*   `optional_params` (*type:* `keyword()`) - Optional parameters
    *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    *   `:access_token` (*type:* `String.t`) - OAuth access token.
    *   `:alt` (*type:* `String.t`) - Data format for response.
    *   `:callback` (*type:* `String.t`) - JSONP
    *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    *   `:key` (*type:* `String.t`) - API key. Required unless you provide an OAuth 2.0 token.
    *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    *   `:quotaUser` (*type:* `String.t`) - Arbitrary string (max 40 chars) used for quota purposes.
    *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
    *   `:body` (*type:* `GoogleApi.SourceRepo.V1.Model.TestIamPermissionsRequest.t`) -
*   `opts` (*type:* `keyword()`) - Call options

## Returns

*   `{:ok, %GoogleApi.SourceRepo.V1.Model.TestIamPermissionsResponse{}}` on success
*   `{:error, info}` on failure
"""
@spec sourcerepo_projects_repos_test_iam_permissions(
        Tesla.Env.client(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.SourceRepo.V1.Model.TestIamPermissionsResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, Tesla.Env.t()}
def sourcerepo_projects_repos_test_iam_permissions(
      connection,
      resource,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps each supported option to where it belongs in the HTTP request.
  optional_params_config = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query,
    :body => :body
  }

  request =
    Request.new()
    |> Request.method(:post)
    # `{+resource}` is expanded with the URL-escaped resource name.
    |> Request.url("/v1/{+resource}:testIamPermissions", %{
      "resource" => URI.encode(resource, &URI.char_unreserved?/1)
    })
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.SourceRepo.V1.Model.TestIamPermissionsResponse{}]
  )
end
end
| 45.391304 | 196 | 0.61741 |
03d68811c3759d89a96454928710fb972e926302 | 1,222 | ex | Elixir | rudy.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | rudy.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | rudy.ex | evansaboo/elixir-programming | 57408424914003091003430500473546c94354d9 | [
"MIT"
] | null | null | null | defmodule Rudy do
def start(port) do
Process.register(spawn(fn-> init(port) end), :rudy)
end
def stop() do
Process.exit(Process.whereis(:rudy), "Time to die!")
end
def init(port) do
opt = [:list, active: false, reuseaddr: true]
case :gen_tcp.listen(port, opt) do
{:ok, listen} ->
handler(listen)
:gen_tcp.close(listen)
:ok
{:error, error} ->
error
end
end
def handler(listen) do
case :gen_tcp.accept(listen) do
{:ok, client} ->
request(client)
handler(listen)
{:error, error} ->
error
end
end
def request(client) do
recv = :gen_tcp.recv(client, 0)
case recv do
{:ok, str} ->
parsed_msg = HTTP.parse_request(str)
response = reply(parsed_msg)
:gen_tcp.send(client, response)
{:error, error} ->
IO.puts("RUDY ERROR: #{error}")
end
:gen_tcp.close(client)
end
def reply({{:get, uri, _}, _, _}) do
:timer.sleep(10)
HTTP.ok("Hello!")
end
end | 23.960784 | 60 | 0.477905 |
03d693303e94a432aba3f01af77c5e6b3dc751e5 | 1,378 | exs | Elixir | mix.exs | hrobeers/fran-planner | 1f049ad65f5d18991a8abeeb6ba7fb583463630a | [
"BSD-2-Clause"
] | 3 | 2015-07-18T22:21:30.000Z | 2015-07-19T16:25:35.000Z | mix.exs | hrobeers/fran-planner | 1f049ad65f5d18991a8abeeb6ba7fb583463630a | [
"BSD-2-Clause"
] | null | null | null | mix.exs | hrobeers/fran-planner | 1f049ad65f5d18991a8abeeb6ba7fb583463630a | [
"BSD-2-Clause"
] | null | null | null | defmodule FranAppBackend.Mixfile do
use Mix.Project
def project do
[app: :fran_app_backend,
version: "0.0.2",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {FranAppBackend, []},
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex, :plug_cors]]
end
# Specifies which paths to compile per environment
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:plug_cors, "~> 0.7"},
{:phoenix, "~> 0.13"},
{:phoenix_ecto, "~> 0.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 1.0"},
{:phoenix_live_reload, "~> 0.4", only: :dev},
{:cowboy, "~> 1.0"},
{:fran_app_html, git: "https://github.com/hrobeers/fran-planner-html.git",
branch: "include",
app: false,
compile: "cp -r * ../../priv/static/."},
{:exrm, "~> 0.17.0"}]
end
end
| 29.956522 | 79 | 0.571118 |
03d694dce38cccc58c08ee36cd977c4c8d47406b | 59 | ex | Elixir | lib/ginapi_web/views/page_view.ex | iandeherdt/phoenixginapp | 4079a71130a8609b36118080757e10a80d50ebac | [
"Apache-2.0"
] | null | null | null | lib/ginapi_web/views/page_view.ex | iandeherdt/phoenixginapp | 4079a71130a8609b36118080757e10a80d50ebac | [
"Apache-2.0"
] | null | null | null | lib/ginapi_web/views/page_view.ex | iandeherdt/phoenixginapp | 4079a71130a8609b36118080757e10a80d50ebac | [
"Apache-2.0"
] | null | null | null | defmodule GinapiWeb.PageView do
use GinapiWeb, :view
end
| 14.75 | 31 | 0.79661 |
03d6c15c0d6ccf3344ced5d1c6a9a5c758b827ef | 3,482 | ex | Elixir | clients/network_management/lib/google_api/network_management/v1beta1/model/firewall_info.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/network_management/lib/google_api/network_management/v1beta1/model/firewall_info.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/network_management/lib/google_api/network_management/v1beta1/model/firewall_info.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1beta1.Model.FirewallInfo do
  @moduledoc """
  For display only. Metadata associated with a VPC firewall rule, an implied
  VPC firewall rule, or a hierarchical firewall policy rule.

  ## Attributes

  *   `action` (*type:* `String.t`, *default:* `nil`) - Possible values: ALLOW, DENY
  *   `direction` (*type:* `String.t`, *default:* `nil`) - Possible values: INGRESS, EGRESS
  *   `displayName` (*type:* `String.t`, *default:* `nil`) - The display name of the VPC
      firewall rule. Not applicable to hierarchical firewall policy rules.
  *   `firewallRuleType` (*type:* `String.t`, *default:* `nil`) - The firewall rule's type.
  *   `networkUri` (*type:* `String.t`, *default:* `nil`) - The URI of the VPC network that the
      firewall rule is associated with. Not applicable to hierarchical firewall policy rules.
  *   `policy` (*type:* `String.t`, *default:* `nil`) - The hierarchical firewall policy that
      this rule is associated with. Not applicable to VPC firewall rules.
  *   `priority` (*type:* `integer()`, *default:* `nil`) - The priority of the firewall rule.
  *   `targetServiceAccounts` (*type:* `list(String.t)`, *default:* `nil`) - The target service
      accounts specified by the firewall rule.
  *   `targetTags` (*type:* `list(String.t)`, *default:* `nil`) - The target tags defined by the
      VPC firewall rule. Not applicable to hierarchical firewall policy rules.
  *   `uri` (*type:* `String.t`, *default:* `nil`) - The URI of the VPC firewall rule. Not
      applicable to implied firewall rules or hierarchical firewall policy rules.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :action => String.t() | nil,
          :direction => String.t() | nil,
          :displayName => String.t() | nil,
          :firewallRuleType => String.t() | nil,
          :networkUri => String.t() | nil,
          :policy => String.t() | nil,
          :priority => integer() | nil,
          :targetServiceAccounts => list(String.t()) | nil,
          :targetTags => list(String.t()) | nil,
          :uri => String.t() | nil
        }

  # One `field` call per JSON attribute (see GoogleApi.Gax.ModelBase);
  # `type: :list` marks repeated fields for element-wise decoding.
  field(:action)
  field(:direction)
  field(:displayName)
  field(:firewallRuleType)
  field(:networkUri)
  field(:policy)
  field(:priority)
  field(:targetServiceAccounts, type: :list)
  field(:targetTags, type: :list)
  field(:uri)
end
defimpl Poison.Decoder, for: GoogleApi.NetworkManagement.V1beta1.Model.FirewallInfo do
  # Delegates straight to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.NetworkManagement.V1beta1.Model.FirewallInfo.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.NetworkManagement.V1beta1.Model.FirewallInfo do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 47.054054 | 197 | 0.692705 |
03d6cd4375221362df3d29a62a09b3dd5977a9bd | 1,066 | exs | Elixir | test/kindle_clippings_parser/clipping_test.exs | clintgibler/kindle_clippings_parser | 86654721b7f12d11952b5e18364d67aef8805ce9 | [
"MIT"
] | null | null | null | test/kindle_clippings_parser/clipping_test.exs | clintgibler/kindle_clippings_parser | 86654721b7f12d11952b5e18364d67aef8805ce9 | [
"MIT"
] | null | null | null | test/kindle_clippings_parser/clipping_test.exs | clintgibler/kindle_clippings_parser | 86654721b7f12d11952b5e18364d67aef8805ce9 | [
"MIT"
] | null | null | null | defmodule KindleClippingsParser.ClippingTest do
use ExUnit.Case
alias KindleClippingsParser.Clipping
test "from_text" do
text = ~S"""
Foobar
- Highlight Loc. 125-26 | Added on Monday, March 09, 1970, 08:35 PM
The most generic of all books.
So good.
"""
clipping = Clipping.from_text(text)
assert clipping.book_name == "Foobar"
assert clipping.highlight_range == "125-26"
assert clipping.time_added == "Monday, March 09, 1970, 08:35 PM"
assert clipping.text == "The most generic of all books.\nSo good."
end
test "get_highlight_range_and_time_added" do
line = "- Highlight Loc. 2949 | Added on Wednesday, December 30, 1970, 12:58 AM"
{highlight_range, time_added} = Clipping.get_highlight_range_and_time_added(line)
assert highlight_range == "2949"
end
test "from_file" do
file = "test/data/My Clippings.txt"
clippings = Clipping.from_file(file)
assert Enum.count(clippings) == 3
assert Enum.map(clippings, fn(c) -> c.book_name end) == ["Foobar", "Test", "Foobar"]
end
end
| 30.457143 | 88 | 0.690432 |
03d6fdd566f81f6d6ea184f5434c60e1eb79c6d0 | 1,972 | ex | Elixir | lib/helpers_sqlite.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | 3 | 2019-04-18T15:36:50.000Z | 2021-01-19T04:38:48.000Z | lib/helpers_sqlite.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | null | null | null | lib/helpers_sqlite.ex | areski/excdr-pusher | 45389e29ab97d11585f22fe29fd7da54d9f31ef8 | [
"MIT"
] | 3 | 2017-08-15T10:43:40.000Z | 2018-11-06T09:12:49.000Z | defmodule ExCdrPusher.HSqlite do
require Logger
alias Application, as: App
@moduledoc """
This module contains method to communicate with SQLite CDRs storage.
"""
# Found CDRs not imported
def fetch_cdr do
case Sqlitex.open(App.fetch_env!(:excdr_pusher, :sqlite_db)) do
{:ok, db} ->
fetchsql = "SELECT OID, * FROM cdr WHERE imported=0 ORDER BY OID DESC LIMIT ?;"
cdrs = Sqlitex.query(db, fetchsql, bind: [App.fetch_env!(:excdr_pusher, :amount_cdr_fetch)])
Sqlitex.close(db)
cdrs
{:error, reason} ->
Logger.error(reason)
{:error}
end
end
# Mark those CDRs as imported to not fetch them twice
def mark_cdr_imported(cdrs) do
Logger.debug(fn -> "Mark CDRs: #{length(cdrs)}" end)
ids = Enum.map(cdrs, fn x -> x[:rowid] end)
questmarks = ids |> Enum.map(fn _ -> "?" end) |> Enum.join(", ")
sql = "UPDATE cdr SET imported=1 WHERE OID IN (" <> questmarks <> ")"
# IO.puts sql
case Sqlitex.open(App.fetch_env!(:excdr_pusher, :sqlite_db)) do
{:ok, db} ->
Sqlitex.query(db, sql, bind: ids)
Sqlitex.close(db)
{:error, reason} ->
Logger.error(reason)
{:error}
end
end
# Sqlitex.Server will not detect if the DB has an issue or corruption
# # Found CDRs not imported
# def fetch_cdr do
# Sqlitex.Server.query(
# Sqlitex.DB,
# "SELECT OID, * FROM cdr WHERE imported=0 ORDER BY OID DESC LIMIT ?;",
# bind: [App.fetch_env!(:excdr_pusher, :amount_cdr_fetch)]
# )
# end
# # Mark those CDRs as imported to not fetch them twice
# def mark_cdr_imported(cdrs) do
# Logger.debug(fn -> "Mark CDRs: #{length(cdrs)}" end)
# ids = Enum.map(cdrs, fn x -> x[:rowid] end)
# questmarks = ids |> Enum.map(fn _ -> "?" end) |> Enum.join(", ")
# sql = "UPDATE cdr SET imported=1 WHERE OID IN (" <> questmarks <> ")"
# Sqlitex.Server.query(Sqlitex.DB, sql, bind: ids)
# end
end
| 31.301587 | 100 | 0.613083 |
03d71bcf772b044396b7303926d07a6b7f51f2fe | 1,812 | ex | Elixir | 2020/liveview/dazzle/lib/dazzle_web/telemetry.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/liveview/dazzle/lib/dazzle_web/telemetry.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | 2020/liveview/dazzle/lib/dazzle_web/telemetry.ex | herminiotorres/programmer_passport | d1786518a3a5f82471457e0ace41c4c33343739a | [
"MIT"
] | null | null | null | defmodule DazzleWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("dazzle.repo.query.total_time", unit: {:native, :millisecond}),
summary("dazzle.repo.query.decode_time", unit: {:native, :millisecond}),
summary("dazzle.repo.query.query_time", unit: {:native, :millisecond}),
summary("dazzle.repo.query.queue_time", unit: {:native, :millisecond}),
summary("dazzle.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {DazzleWeb, :count_users, []}
]
end
end
| 32.357143 | 86 | 0.67053 |
03d75b38184325c16d98c2ed22e53a45a96f0c5d | 322 | exs | Elixir | functionalprogramming/introduction/10_update_list.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | 2 | 2016-09-20T11:56:33.000Z | 2016-10-20T20:11:04.000Z | functionalprogramming/introduction/10_update_list.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | null | null | null | functionalprogramming/introduction/10_update_list.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | null | null | null | # HackerRank "Functional Programming" - Update List
defmodule Solution do
defp print_absolute(input) do
String.to_integer(input)
|> abs
|> IO.puts
end
def main() do
IO.read(:stdio, :all) |> String.strip |> String.split("\n")
|> Enum.map(fn(x)->print_absolute(x)end)
end
end
Solution.main()
| 17.888889 | 63 | 0.652174 |
03d7c43872421e6f3a84cc6fa75629718a7ab93a | 2,015 | ex | Elixir | clients/jobs/lib/google_api/jobs/v3/model/google_cloud_talent_v4_job_processing_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/jobs/lib/google_api/jobs/v3/model/google_cloud_talent_v4_job_processing_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/jobs/lib/google_api/jobs/v3/model/google_cloud_talent_v4_job_processing_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Jobs.V3.Model.GoogleCloudTalentV4JobProcessingOptions do
  @moduledoc """
  Options for job processing.

  ## Attributes

  *   `disableStreetAddressResolution` (*type:* `boolean()`, *default:* `nil`) - If set to
      `true`, the service does not attempt to resolve a more precise address for the job.
  *   `htmlSanitization` (*type:* `String.t`, *default:* `nil`) - Option for job HTML content
      sanitization. Applied fields are: * description * applicationInfo.instruction *
      incentives * qualifications * responsibilities. HTML tags in these fields may be
      stripped if sanitization isn't disabled. Defaults to
      HtmlSanitization.SIMPLE_FORMATTING_ONLY.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :disableStreetAddressResolution => boolean(),
          :htmlSanitization => String.t()
        }

  # One `field/1` call per JSON attribute (see GoogleApi.Gax.ModelBase).
  field(:disableStreetAddressResolution)
  field(:htmlSanitization)
end
defimpl Poison.Decoder, for: GoogleApi.Jobs.V3.Model.GoogleCloudTalentV4JobProcessingOptions do
  # Delegates straight to the model's generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.Jobs.V3.Model.GoogleCloudTalentV4JobProcessingOptions.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Jobs.V3.Model.GoogleCloudTalentV4JobProcessingOptions do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.3 | 348 | 0.759305 |
03d7d58f04a8f5eb68b3b76813f58e5495596a7a | 7,264 | ex | Elixir | lib/livebook/runtime/erl_dist/runtime_server.ex | rodrigues/livebook | 9822735bcf0b5bffbbc2bd59a7b942e81276ffe3 | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/runtime_server.ex | rodrigues/livebook | 9822735bcf0b5bffbbc2bd59a7b942e81276ffe3 | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/erl_dist/runtime_server.ex | rodrigues/livebook | 9822735bcf0b5bffbbc2bd59a7b942e81276ffe3 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.ErlDist.RuntimeServer do
@moduledoc false
# A server process backing a specific runtime.
#
# This process handles `Livebook.Runtime` operations,
# like evaluation and completion. It spawns/terminates
# individual evaluators corresponding to evaluation
# containers as necessary.
#
# Every runtime server must have an owner process,
# to which the server lifetime is bound.
#
# For more specification see `Livebook.Runtime`.
use GenServer, restart: :temporary
alias Livebook.Evaluator
alias Livebook.Runtime
alias Livebook.Runtime.ErlDist
@await_owner_timeout 5_000
@doc """
Starts the manager.
Note: make sure to call `set_owner` within #{@await_owner_timeout}ms
or the runtime server assumes it's not needed and terminates.
"""
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts)
end
@doc """
Sets the owner process.
The owner process is monitored and as soon as it terminates,
the server also terminates. All the evaluation results are
send directly to the owner.
"""
@spec set_owner(pid(), pid()) :: :ok
def set_owner(pid, owner) do
GenServer.cast(pid, {:set_owner, owner})
end
@doc """
Evaluates the given code using an `Livebook.Evaluator`
process belonging to the given container and instructs
it to send all the outputs to the owner process.
If no evaluator exists for the given container, a new
one is started.
See `Livebook.Evaluator` for more details.
"""
@spec evaluate_code(pid(), String.t(), Runtime.locator(), Runtime.locator(), keyword()) :: :ok
def evaluate_code(pid, code, locator, prev_locator, opts \\ []) do
GenServer.cast(pid, {:evaluate_code, code, locator, prev_locator, opts})
end
@doc """
Removes the specified evaluation from the history.
See `Livebook.Evaluator` for more details.
"""
@spec forget_evaluation(pid(), Runtime.locator()) :: :ok
def forget_evaluation(pid, locator) do
GenServer.cast(pid, {:forget_evaluation, locator})
end
@doc """
Terminates the `Livebook.Evaluator` process that belongs
to the given container.
"""
@spec drop_container(pid(), Runtime.container_ref()) :: :ok
def drop_container(pid, container_ref) do
GenServer.cast(pid, {:drop_container, container_ref})
end
@doc """
Asynchronously sends an intellisense request to the server.
Completions are forwarded to `Livebook.Evaluator` process
that belongs to the given container. If there's no evaluator,
there's also no binding and environment, so a generic
completion is handled by a temporary process.
See `Livebook.Runtime` for more details.
"""
@spec handle_intellisense(
pid(),
pid(),
reference(),
Runtime.intellisense_request(),
Runtime.locator()
) :: :ok
def handle_intellisense(pid, send_to, ref, request, locator) do
GenServer.cast(pid, {:handle_intellisense, send_to, ref, request, locator})
end
@doc """
Stops the manager.
This results in all Livebook-related modules being unloaded
from the runtime node.
"""
@spec stop(pid()) :: :ok
def stop(pid) do
GenServer.stop(pid)
end
@impl true
def init(_opts) do
Process.send_after(self(), :check_owner, @await_owner_timeout)
{:ok, evaluator_supervisor} = ErlDist.EvaluatorSupervisor.start_link()
{:ok, completion_supervisor} = Task.Supervisor.start_link()
{:ok,
%{
owner: nil,
evaluators: %{},
evaluator_supervisor: evaluator_supervisor,
completion_supervisor: completion_supervisor
}}
end
@impl true
def handle_info(:check_owner, state) do
# If not owner has been set within @await_owner_timeout
# from the start, terminate the process.
if state.owner do
{:noreply, state}
else
{:stop, :no_owner, state}
end
end
def handle_info({:DOWN, _, :process, owner, _}, %{owner: owner} = state) do
{:stop, :normal, state}
end
def handle_info({:DOWN, _, :process, pid, reason}, state) do
state.evaluators
|> Enum.find(fn {_container_ref, evaluator_pid} ->
evaluator_pid == pid
end)
|> case do
{container_ref, _} ->
message = Exception.format_exit(reason)
send(state.owner, {:container_down, container_ref, message})
{:noreply, %{state | evaluators: Map.delete(state.evaluators, container_ref)}}
nil ->
{:noreply, state}
end
end
def handle_info(_message, state), do: {:noreply, state}
@impl true
def handle_cast({:set_owner, owner}, state) do
Process.monitor(owner)
{:noreply, %{state | owner: owner}}
end
def handle_cast(
{:evaluate_code, code, {container_ref, evaluation_ref}, prev_locator, opts},
state
) do
state = ensure_evaluator(state, container_ref)
prev_evaluation_ref =
case prev_locator do
{^container_ref, evaluation_ref} ->
evaluation_ref
{parent_container_ref, evaluation_ref} ->
Evaluator.initialize_from(
state.evaluators[container_ref],
state.evaluators[parent_container_ref],
evaluation_ref
)
nil
end
Evaluator.evaluate_code(
state.evaluators[container_ref],
state.owner,
code,
evaluation_ref,
prev_evaluation_ref,
opts
)
{:noreply, state}
end
def handle_cast({:forget_evaluation, {container_ref, evaluation_ref}}, state) do
with {:ok, evaluator} <- Map.fetch(state.evaluators, container_ref) do
Evaluator.forget_evaluation(evaluator, evaluation_ref)
end
{:noreply, state}
end
def handle_cast({:drop_container, container_ref}, state) do
state = discard_evaluator(state, container_ref)
{:noreply, state}
end
def handle_cast({:handle_intellisense, send_to, ref, request, locator}, state) do
{container_ref, evaluation_ref} = locator
evaluator = state.evaluators[container_ref]
if evaluator != nil and elem(request, 0) not in [:format] do
Evaluator.handle_intellisense(evaluator, send_to, ref, request, evaluation_ref)
else
# Handle the request in a temporary process using an empty evaluation context
Task.Supervisor.start_child(state.completion_supervisor, fn ->
binding = []
env = :elixir.env_for_eval([])
response = Livebook.Intellisense.handle_request(request, binding, env)
send(send_to, {:intellisense_response, ref, request, response})
end)
end
{:noreply, state}
end
defp ensure_evaluator(state, container_ref) do
if Map.has_key?(state.evaluators, container_ref) do
state
else
{:ok, evaluator} = ErlDist.EvaluatorSupervisor.start_evaluator(state.evaluator_supervisor)
Process.monitor(evaluator)
%{state | evaluators: Map.put(state.evaluators, container_ref, evaluator)}
end
end
defp discard_evaluator(state, container_ref) do
case Map.fetch(state.evaluators, container_ref) do
{:ok, evaluator} ->
ErlDist.EvaluatorSupervisor.terminate_evaluator(state.evaluator_supervisor, evaluator)
%{state | evaluators: Map.delete(state.evaluators, container_ref)}
:error ->
state
end
end
end
| 28.598425 | 96 | 0.682269 |
03d7ff4180e022299376b9f41b21a9e748b7d2b0 | 1,501 | ex | Elixir | lib/rocketpay_web/controllers/accounts_controller.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | 2 | 2021-03-01T09:15:57.000Z | 2021-03-02T23:30:57.000Z | lib/rocketpay_web/controllers/accounts_controller.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | null | null | null | lib/rocketpay_web/controllers/accounts_controller.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | null | null | null | defmodule RocketpayWeb.AccountsController do
use RocketpayWeb, :controller
alias Rocketpay.Account
alias Rocketpay.Accounts.Transactions.Response, as: TransactionResponse
action_fallback RocketpayWeb.FallbackController
plug :authorize_operation when action in [:withdraw, :deposit]
plug :authorize_transaction when action in [:transaction]
def deposit(conn, params) do
with {:ok, %Account{} = account} <- Rocketpay.deposit(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def withdraw(conn, params) do
with {:ok, %Account{} = account} <- Rocketpay.withdraw(params) do
conn
|> put_status(:ok)
|> render("update.json", account: account)
end
end
def transaction(conn, params) do
with {:ok, %TransactionResponse{} = transaction} <- Rocketpay.transaction(params) do
conn
|> put_status(:ok)
|> render("transaction.json", transaction: transaction)
end
end
defp authorize_operation(conn, _params) do
%{params: %{"id" => id}} = conn
if conn.assigns.current_user.account_id == id do
conn
else
conn
|> put_status(:unauthorized)
|> text("")
|> halt()
end
end
defp authorize_transaction(conn, _params) do
%{params: %{"from" => id}} = conn
if conn.assigns.current_user.account_id == id do
conn
else
conn
|> put_status(:unauthorized)
|> text("")
|> halt()
end
end
end
| 24.606557 | 88 | 0.644237 |
03d8203c4c0e2c862cdf1fc3cb7084236930ef84 | 2,661 | ex | Elixir | lib/ueberauth/strategy/twitch/oauth.ex | ryanwinchester/ueberauth_twitch | 8152588078f4ec116400fcfd94a295b3bfe67ab4 | [
"MIT"
] | 2 | 2020-05-21T19:29:32.000Z | 2020-07-26T19:25:49.000Z | lib/ueberauth/strategy/twitch/oauth.ex | ryanwinchester/ueberauth_twitch | 8152588078f4ec116400fcfd94a295b3bfe67ab4 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/twitch/oauth.ex | ryanwinchester/ueberauth_twitch | 8152588078f4ec116400fcfd94a295b3bfe67ab4 | [
"MIT"
defmodule Ueberauth.Strategy.Twitch.OAuth do
  @moduledoc """
  OAuth2 for Twitch.

  Add `client_id` and `client_secret` to your configuration:

      config :ueberauth, Ueberauth.Strategy.Twitch.OAuth,
        client_id: System.get_env("TWITCH_APP_ID"),
        client_secret: System.get_env("TWITCH_APP_SECRET")
  """
  use OAuth2.Strategy

  # Fixed: the original keyword list carried a trailing comma after
  # `token_url`, which is a syntax error in Elixir.
  @defaults [
    authorize_url: "https://id.twitch.tv/oauth2/authorize",
    headers: [{"Content-Type", "application/x-www-form-urlencoded"}],
    site: "https://api.twitch.tv",
    strategy: __MODULE__,
    token_url: "https://id.twitch.tv/oauth2/token"
  ]

  @doc false
  # Merges defaults < app config < call-site opts (rightmost wins).
  def options(opts \\ [], app \\ Application) do
    config = app.get_env(:ueberauth, Ueberauth.Strategy.Twitch.OAuth)

    @defaults
    |> Keyword.merge(config)
    |> Keyword.merge(opts)
  end

  @doc """
  Generate Authentication: Basic Base64<CLIENT_ID>:<CLIENT_SECRET>
  """
  def auth_sig(opts \\ []) do
    opts = options(opts)
    sig = Base.encode64(opts[:client_id] <> ":" <> opts[:client_secret])
    "Basic #{sig}"
  end

  @doc """
  Construct a client for requests to Twitch.

  This will be setup automatically for you in `Ueberauth.Strategy.Twitch`.
  These options are only useful for usage outside the normal callback phase of Ueberauth.
  """
  def client(opts \\ []) do
    options(opts)
    |> OAuth2.Client.new()
    |> OAuth2.Client.put_serializer("application/json", Jason)
  end

  @doc """
  Construct a signed client for token and refresh token requests.
  """
  def signed_client(opts \\ []) do
    opts
    |> client()
    |> put_header("Authorization", auth_sig(opts))
  end

  @doc """
  Provides the authorize url for the request phase of Ueberauth. No need to call this usually.
  """
  def authorize_url!(params \\ [], opts \\ []) do
    opts
    |> client()
    |> OAuth2.Client.authorize_url!(params)
  end

  def get(token, url, headers \\ [], opts \\ []) do
    client(token: token)
    |> OAuth2.Client.get(url, headers, opts)
  end

  @doc """
  Exchanges the authorization code for an access token.

  Fixed: removed leftover `IO.inspect` debug output of the redirect URI that
  was printed on every token exchange.
  """
  def get_token!(params \\ [], opts \\ []) do
    base = signed_client(opts)
    client = %{base | client_secret: opts[:client_secret], redirect_uri: opts[:redirect_uri]}

    client
    |> put_param("client_secret", opts[:client_secret])
    |> OAuth2.Client.get_token!(params)
  end

  # Strategy Callbacks

  def authorize_url(client, params) do
    OAuth2.Strategy.AuthCode.authorize_url(client, params)
  end

  def get_token(client, params, headers) do
    client
    |> put_header("Accept", "application/json")
    |> OAuth2.Strategy.AuthCode.get_token(params, headers)
  end
end
03d8487e5fd64c8318c298779c933f6375c330fa | 19,083 | ex | Elixir | lib/email_service/entities/email/binding/substitution/dynamic.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | 2 | 2019-04-15T22:17:59.000Z | 2022-01-03T15:35:36.000Z | lib/email_service/entities/email/binding/substitution/dynamic.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | lib/email_service/entities/email/binding/substitution/dynamic.ex | noizu/KitchenSink | 34f51fb93dfa913ba7be411475d02520d537e676 | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2020 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defmodule Noizu.EmailService.Email.Binding.Substitution.Dynamic do
  # Parser/accumulator that scans a handlebars-style template and records which
  # variables (and under which conditional sections) must be bound to send it.
  @vsn 1.0
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Selector
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Section
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Error
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Effective
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Formula
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic, as: Binding

  @type t :: %__MODULE__{
               version: any,
               current_token: {integer, String.t} | nil,
               section_stack: [Section.t],
               errors: list,
               last_error: Error.t,
               outcome: atom,
               meta: Map.t,
               vsn: float,
             }

  defstruct [
    # Template version the bindings were extracted from (set by extract/2).
    version: nil,
    # {ordinal, token} of the `{{...}}` currently being parsed, for error reporting.
    current_token: nil,
    # Stack of open sections; the head is the innermost/current section.
    section_stack: [%Section{}],
    errors: [],
    last_error: nil,
    outcome: :ok,
    meta: %{},
    vsn: @vsn
  ]

  # Executes the parsed binding script against `input` (the "bind space") and
  # returns an Effective bindings report; outcome is {:error, :unbound_fields}
  # when any required binding could not be resolved from input.
  def effective_bindings(%__MODULE__{} = this, input, context, options) do
    default_extractor = &__MODULE__.variable_extractor/4
    options = update_in(options || %{}, [Access.key(:variable_extractor)], &(&1 || default_extractor))
    state = options[:state] || %Noizu.RuleEngine.State.InlineStateManager{}
    state = Noizu.RuleEngine.StateProtocol.put!(state, :bind_space, input, context)
    # NOTE(review): the post-execution `state` is discarded here (unused binding).
    {response, state} = Noizu.RuleEngine.ScriptProtocol.execute!(this, state, context, options)
    %Effective{response| outcome: length(response.unbound.required) > 0 && {:error, :unbound_fields} || :ok}
  end

  # Default variable lookup: resolves `selector` against the :bind_space blob
  # stored on the rule-engine state. Returns {{:value, val} | false, state}.
  def variable_extractor(selector, state, context, options) do
    {blob, state} = Noizu.RuleEngine.StateProtocol.get!(state, :bind_space, context)
    {bound?,val, state} = Selector.bound_inner(selector, blob, state, context, options)
    {bound? && {:value, val}, state}
  end
#----------------------------
# extract/2
#----------------------------
@doc """
Parse input string and extract (including conditional qualifiers) data needed to populate template.
"""
def extract(block, options \\ %{})
def extract(%SendGrid.Template.Version{} = version, options) do
this = extract((version.subject || "") <> (version.html_content || "") <> (version.plain_content || ""), options)
%__MODULE__{this| version: version.id}
end
def extract(block, options) when is_bitstring(block) do
state = %__MODULE__{}
# 1. Reshape bindings
block = String.replace(block, ~r/\{\{!bind /, "{{#bind ")
# 1. Explicit Variable Bind
cond do
state.outcome != :ok ->
mark_error(state, :invalid_required_binding, options)
|> put_in([Access.key(:outcome)], :fatal_error)
:else ->
# 2. Strip comment sections that allow nested bars.
block = String.replace(block, ~r/\{\{!--.*--\}\}/U, "")
# 3. Strip plain comments.
block = String.replace(block, ~r/\{\{!.*\}\}/U, "")
# 4. Parse Tokens and Specifiers
case Regex.scan(~r/\{\{([^\}]+)\}\}/, block, capture: :all_but_first) do
v when is_list(v) ->
List.flatten(v)
|> Enum.reduce_while(state, fn(token, acc) ->
case extract_token({token, acc}, options) do
{:halt, this} -> {:halt, this}
{:cont, this} -> {:cont, this}
this -> {:cont, this}
end
end)
_ -> state
end
end
end
  #----------------------------
  # extract_token/2
  #----------------------------
  # Dispatches a single `{{ ... }}` token: explicit binds (`#bind`), section
  # opens (`#...` / `else`), section closes (`/...`), or plain selectors.
  # Returns {:cont, this} to keep scanning or {:halt, this} on a fatal error.
  def extract_token({token, this}, options) do
    # compress and build formula tree
    token = String.trim(token)
    # Track the token and its ordinal so errors can point at their source.
    this = case this.current_token do
      nil -> %__MODULE__{this| current_token: {0, token}}
      {index, _previous_token} -> %__MODULE__{this| current_token: {index + 1, token}}
    end
    case token do
      # `{{!bind x}}` (rewritten to `{{#bind x}}` by extract/2): force `x`
      # into the required bindings without emitting output.
      "#bind " <> clause ->
        case extract_selector(this, "!bind " <> clause, options) do
          {:error, this} -> {:halt, this}
          {clause, this} ->
            this = require_binding(this, clause, options)
            {:cont, this}
        end
      # Begin Built-in
      "#" <> _clause ->
        case extract_token__section_open(this, token, options) do
          {:halt, this} -> {:halt, this}
          {:cont, this} -> {:cont, this}
          this -> {:cont, this}
        end
      # `else` / `else if ...` open a sibling section for the current if/unless.
      "else" <> clause ->
        case extract_token__section_open(this, token, options) do
          {:halt, this} -> {:halt, this}
          {:cont, this} -> {:cont, this}
          this -> {:cont, this}
        end
      # End Built-In
      "/" <> _clause ->
        case extract_token__section_close(this, token, options) do
          {:halt, this} -> {:halt, this}
          {:cont, this} -> {:cont, this}
          this -> {:cont, this}
        end
      # Specifiers — a plain selector reference becomes a required binding.
      _ ->
        case extract_selector(this, token, options) do
          {:error, this} -> {:halt, this}
          {clause, this} ->
            this = require_binding(this, clause, options)
            {:cont, this}
        end
    end
  end
  #----------------------------
  # extract_token__section_open/3
  #----------------------------
  # Classifies a section opener (`#if`, `#unless`, `#each`, `#with`, `else`,
  # or an unknown helper) and delegates to __enter with the section tag and
  # its (possibly nil) argument clause.
  def extract_token__section_open(this, token, options) do
    token = String.trim(token)
    case token do
      "else " <> clause -> extract_token__section_open__enter(:extended_else, clause, this, options)
      "else" -> extract_token__section_open__enter(:else, nil, this, options)
      "#if " <> clause -> extract_token__section_open__enter(:if, clause, this, options)
      "#unless " <> clause -> extract_token__section_open__enter(:unless, clause, this, options)
      # NOTE(review): `#each`/`#with` match without a trailing space, so the
      # clause keeps its leading space and `#eachfoo` would also match — confirm intended.
      "#each" <> clause -> extract_token__section_open__enter(:each, clause, this, options)
      "#with" <> clause -> extract_token__section_open__enter(:with, clause, this, options)
      "#" <> _clause ->
        # Unknown helper: capture its name (and optional piped clause) and
        # track it as an :unsupported section so nesting still balances.
        case Regex.run(~r/^#([\w\.]+)(\s.*\|.*)?$/U, token, capture: :all_but_first) do
          [section, clause] -> extract_token__section_open__enter({:unsupported, section}, clause, this, options)
          [section] -> extract_token__section_open__enter({:unsupported, section}, nil, this, options)
        end
      _ -> fatal_error(this, {:section_open, token})
    end
  end
  #----------------------------
  # extract_token__section_open__enter/4
  #----------------------------
  # `else if`/`else unless`: parse the condition (negating it for `unless`)
  # and push an :extended_else section; bare `else` falls through below.
  def extract_token__section_open__enter(:extended_else, clause, this, options) do
    cond do
      Regex.match?(~r/^\s*if.*$/, clause) ->
        case Regex.run(~r/^\s*if\s*(.*$)/, clause, capture: :all_but_first) do
          [m] ->
            case extended_extract_selector(this, String.trim(m), options) do
              {:error, this} -> {:halt, this}
              # NOTE(review): `pipes` and `tail` are bound but unused here.
              {{clause, pipes}, this} ->
                [head|tail] = this.section_stack
                new_section = Section.spawn(head, :extended_else, clause, options)
                # The updated struct is the clause's return value.
                this = %__MODULE__{this| section_stack: [new_section|this.section_stack]}
            end
        end
      Regex.match?(~r/^\s*unless.*$/, clause) ->
        case Regex.run(~r/^\s*unless\s*(.*$)/, clause, capture: :all_but_first) do
          [m] ->
            case extended_extract_selector(this, String.trim(m), options) do
              {:error, this} -> {:halt, this}
              {{clause, pipes}, this} ->
                [head|tail] = this.section_stack
                # `unless` is represented as a negated `if` condition.
                clause = Formula.negate(clause)
                new_section = Section.spawn(head, :extended_else, clause, options)
                this = %__MODULE__{this| section_stack: [new_section|this.section_stack]}
            end
        end
      :else -> extract_token__section_open__enter(:else, nil, this, options)
    end
  end

  # General section open: parse the clause, push a new child Section, and for
  # each/with update the current selector (or register an `as |alias|`).
  def extract_token__section_open__enter(section, clause, this, options) do
    case extended_extract_selector(this, clause, options) do
      {:error, this} -> {:halt, this}
      {{clause, pipes}, this} ->
        # Deprecate, section collapse process takes care of forced requirements, formula process takes care of ensuring required bindings for if/unless clauses loaded.
        # this = require_binding(this, clause, options)
        # append new section
        [head|tail] = this.section_stack
        new_section = Section.spawn(head, section, clause, options)
        this = %__MODULE__{this| section_stack: [new_section|this.section_stack]}
        # update this or bindings if each or with
        case section do
          :with ->
            cond do
              as = pipes[:as] -> add_alias(this, clause, as)
              :else -> current_selector(this, clause)
            end
          :each ->
            # `#each foo` iterates, so the inner selector is foo[*].
            clause = Selector.wildcard(clause)
            cond do
              as = pipes[:as] -> add_alias(this, clause, as)
              :else -> current_selector(this, clause)
            end
          _ -> this
        end
    end
  end
#----------------------------
#
#----------------------------
def extract_token__section_close(this, token, options) do
token = String.trim(token)
case token do
"/if" <> clause -> extract_token__section_close__exit(:if, this, options)
"/unless" <> clause -> extract_token__section_close__exit(:unless, this, options)
"/each" <> clause -> extract_token__section_close__exit(:each, this, options)
"/with" <> clause -> extract_token__section_close__exit(:with, this, options)
"/" <> section -> extract_token__section_close__exit({:unsupported, String.trim(section)}, this, options)
_ -> fatal_error(this, {:section_close, token}, options)
end
end
  #----------------------------
  # extract_token__section_close__exit
  #----------------------------
  # Collapses the closed section into its parent; converts a mismatch into a
  # fatal parse error on the state.
  def extract_token__section_close__exit(section, this, options) do
    #TODO output for processing structure needs to be built here.
    #E.g nested section tree.
    case extract_token__section_close__exit_match(section, this, options) do
      {:error, cause} -> fatal_error(this, cause, options)
      this -> this
    end
  end

  #----------------------------
  # extract_token__section_close__exit_match
  #----------------------------
  # Stack unwinding rules (h = innermost open section, p = its parent):
  #   1. direct match — fold h into p;
  #   2. closing the if/unless that owns an else branch — fold both at once;
  #   3. a chain of `else if` sections — collapse the whole daisy chain;
  #   4. unsupported sections may be left unclosed — fold and retry;
  #   otherwise the close tag does not match the open section.
  def extract_token__section_close__exit_match(section, %__MODULE__{section_stack: [h,p|tail]} = this, options) do
    cond do
      h.section == section ->
        p = Section.collapse(p, h, options)
        %__MODULE__{this| section_stack: [p|tail]}
      (h.section == :else || h.section == :extended_else) && section == p.section && (section == :if || section == :unless) ->
        [e,i_u,p|t] = this.section_stack
        p = Section.collapse(p, i_u, e, options)
        %__MODULE__{this| section_stack: [p|t]}
      (h.section == :else || h.section == :extended_else) && (p.section == :extended_else) ->
        case Section.collapse_daisy_chain(this.section_stack, options) do
          {:error, details} -> {:error, details}
          tail -> %__MODULE__{this| section_stack: tail}
        end
      Kernel.match?({:unsupported, _}, h.section) ->
        # Allow non closed unsupported sections, unwrap until end of list or match.
        p = Section.collapse(p, h, options)
        extract_token__section_close__exit_match(section, %__MODULE__{this| section_stack: [p|tail]}, options)
      :else -> {:error, {:tag_close_mismatch, section}}
    end
  end
  # Delegates to the root section's accumulated matches (known aliases).
  def matches(this) do
    Section.matches(get_in(this, [Access.key(:section_stack), Access.at(0)]))
  end

  #----------------------------
  # current_selector/1
  #----------------------------
  # Reads the innermost (head) section's current selector.
  def current_selector(this) do
    Section.current_selector(get_in(this, [Access.key(:section_stack), Access.at(0)]))
  end

  #----------------------------
  # current_selector/2
  #----------------------------
  # Replaces the innermost section's current selector with `value`.
  def current_selector(this, value) do
    update_in(this, [Access.key(:section_stack), Access.at(0)], &(Section.current_selector(&1, value)))
  end

  #----------------------------
  # add_alias/2
  #----------------------------
  # Registers an `as |alias|` mapping (clause -> at) on the innermost section.
  def add_alias(this, clause, at) do
    update_in(this, [Access.key(:section_stack), Access.at(0)], &(Section.add_alias(&1, clause, at)))
  end

  #----------------------------
  # require_binding
  #----------------------------
  # Records `binding` as required on the innermost section; nil is a no-op.
  def require_binding(this, nil, _options) do
    this
  end
  def require_binding(this, %Selector{} = binding, options) do
    update_in(this, [Access.key(:section_stack), Access.at(0)], &(Section.require_binding(&1, binding, options)))
  end
  # Wraps mark_error/3 in {:halt, _} so token handlers can abort the scan.
  def fatal_error(this, cause, options \\ %{}) do
    {:halt, mark_error(this, cause, options)}
  end

  #----------------------------
  # mark_error
  #----------------------------
  # Records an Error (tagged with the token being parsed) on the innermost
  # section and as the state's last_error.
  def mark_error(this, cause, options \\ %{})
  def mark_error(this, cause, options) do
    error = %Error{error: cause, token: this.current_token}
    this
    |> update_in([Access.key(:section_stack), Access.at(0)], &(Section.mark_error(&1, error, options)))
    |> put_in([Access.key(:last_error)], error)
  end
def parse_pipes(nil), do: nil
def parse_pipes([]), do: nil
def parse_pipes([v]), do: parse_pipes(v)
def parse_pipes(pipe) do
case Regex.run(~r/as \|\s*([a-zA-Z0-9_]+)?\s*\|/, pipe, capture: :all_but_first) do
[h|t] -> %{as: h}
_ -> %{}
end
end
#----------------------------
# extract_selector/3
#----------------------------
@doc """
Parse clause to extract any operations/formulas and or bound variables.
@returns Dynamic.Binding.Selector or Dynamic.Binding.HandleBarClause
"""
def extract_selector(this, token, options \\ %{}) do
case extended_extract_selector(this, token, options) do
{:error, this} -> {:error, this}
{{clause, _meta}, this} -> {clause, this}
end
end
#----------------------------
# extract_selector/3
#----------------------------
@doc """
Parse clause to extract any operations/formulas and or bound variables.
@returns Dynamic.Binding.Selector or Dynamic.Binding.HandleBarClause
"""
def extended_extract_selector(this, token, options \\ %{})
def extended_extract_selector(this, nil, _options), do: {{nil, nil}, this}
def extended_extract_selector(this, token, options) do
token = String.trim(token)
cond do
token == "this" || token == "." ->
selector = current_selector(this)
if Selector.valid?(selector, options) do
{{selector, nil}, this}
else
{:error, mark_error(this, {:extract_clause, :this, :invalid}, options)}
end
token == "../" ->
selector = current_selector(this)
case Selector.parent(selector, nil, options) do
{:error, clause} ->
{:error, mark_error(this, clause, options)}
selector -> {selector, this}
end
:else ->
case token do
"../" <> _relative ->
clean_token = Regex.replace(~r/\|.*$/, token, "") # strip any pipes
selector = current_selector(this)
pipes = parse_pipes(token)
case Selector.relative(selector, clean_token, parse_pipes(token), options) do
{:error, clause} ->
{:error, mark_error(this, clause, options)}
selector -> {selector, this}
end
"!bind " <> clause ->
case extended_extract_selector(this, clause, options) do
{{%Selector{}, _meta} = selector, this} -> {selector, this}
{:error, this} -> {:error, this}
{_, _this} -> {:error, mark_error(this, :rule_engine_pending, options)}
end
"this." <> clause ->
case Regex.run(~r/^this((?:[\.\[\]]@?[a-zA-Z0-9_]+)*)\]?(\s.*\|.*)?$/, token, capture: :all_but_first) do
[""|_] -> {:error, mark_error(this, :parse, options)}
[b|c] -> {:error, mark_error(this, clause, options)}
b = Regex.scan(~r/[\.\[\]]@?[a-zA-Z0-9_]+/, b) |> List.flatten()
case Selector.extend(current_selector(this), b, parse_pipes(c)) do
selector = {%Selector{}, _meta} -> {selector, this}
{:error, clause} -> {:error, mark_error(this, clause, options)}
end
_ ->
{:error, mark_error(this, {:invalid_token, token}, options)}
end
token ->
case Regex.run(~r/^([a-zA-Z0-9_]+)((?:[\.\[\]]@?[a-zA-Z0-9_]+)*)\]?(\s.*\|.*)?$/, token, capture: :all_but_first) do
[a,b|c] ->
b = Regex.scan(~r/[\.\[\]]@?[a-zA-Z0-9_]+/, b) |> List.flatten()
# todo check for existing match
case Selector.new([a] ++ b, parse_pipes(c), matches(this)) do
selector = {%Selector{}, _meta} -> {selector, this}
{:error, clause} -> {:error, mark_error(this, clause, options)}
end
_ ->
{:error, mark_error(this, {:invalid_token, token}, options)}
end
end
end
end
end
defimpl Noizu.RuleEngine.ScriptProtocol, for: Noizu.EmailService.Email.Binding.Substitution.Dynamic do
  alias Noizu.RuleEngine.Helper
  alias Noizu.EmailService.Email.Binding.Substitution.Dynamic.Effective

  #-----------------
  # execute!/3
  #-----------------
  def execute!(this, state, context), do: execute!(this, state, context, %{})

  #-----------------
  # execute!/4
  #-----------------
  # Executes the (single) root section against the rule-engine state and
  # folds the result into an Effective bindings report.
  def execute!(this, state, context, options) do
    [root] = this.section_stack
    {r, s} = Noizu.RuleEngine.ScriptProtocol.execute!(root, state, context, options)
    Effective.finalize(r, s, context, options)
  end

  #---------------------
  # identifier/3
  #---------------------
  def identifier(this, _state, _context), do: Helper.identifier(this)

  #---------------------
  # identifier/4
  #---------------------
  def identifier(this, _state, _context, _options), do: Helper.identifier(this)

  #---------------------
  # render/3
  #---------------------
  def render(this, state, context), do: render(this, state, context, %{})

  #---------------------
  # render/4
  #---------------------
  # Debug renderer: one line per node, indented by options[:depth].
  # Fixed: the original inspected `this.selector`, but Dynamic defines no
  # :selector field, so rendering raised a KeyError; it now inspects the
  # section stack (truncated to 64 chars) instead.
  def render(this, state, context, options) do
    depth = options[:depth] || 0
    prefix = (depth == 0) && ">> " || String.duplicate(" ", (depth - 1) * 4 + 3) <> "|-- "
    id = identifier(this, state, context, options)
    v = "#{inspect(this.section_stack)}"
    t = String.slice(v, 0..64)
    t = if t != v, do: t <> "...", else: t
    "#{prefix}#{id} [VALUE #{t}]\n"
  end
end
03d874be2bb84ccc1b94932a4aa6a6f8ac81e3c2 | 557 | ex | Elixir | samples/client/petstore/elixir/lib/open_api_petstore/model/dog.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 2 | 2019-03-26T11:04:18.000Z | 2021-01-03T10:54:10.000Z | samples/client/petstore/elixir/lib/open_api_petstore/model/dog.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 7 | 2021-03-01T21:26:03.000Z | 2022-02-27T10:10:20.000Z | samples/client/petstore/elixir/lib/open_api_petstore/model/dog.ex | kymbalon/openapi-generator | 8327a920408314aacb7d00f64285ae88e9195633 | [
"Apache-2.0"
] | 4 | 2019-04-08T17:06:09.000Z | 2020-06-09T18:16:08.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule OpenAPIPetstore.Model.Dog do
  @moduledoc """
  Auto-generated struct for the petstore `Dog` schema. Per the typespec,
  `className` is expected to be set; `color` and `breed` may be nil.
  """

  @derive [Poison.Encoder]
  defstruct [
    :"className",
    :"color",
    :"breed"
  ]

  @type t :: %__MODULE__{
    :"className" => String.t,
    :"color" => String.t | nil,
    :"breed" => String.t | nil
  }
end

# Pass-through decoder: JSON fields are mapped onto the struct as-is, with no
# nested models to decode.
defimpl Poison.Decoder, for: OpenAPIPetstore.Model.Dog do
  def decode(value, _options) do
    value
  end
end
| 18.566667 | 91 | 0.642729 |
03d882fa6a07c74682cf26a0bd7f6209f73ae415 | 987 | exs | Elixir | test/mix/ecto_test.exs | larryweya/ecto | d0d1fd43f0f97856a119184163167a7e79574923 | [
"Apache-2.0"
] | 1 | 2022-01-24T07:32:34.000Z | 2022-01-24T07:32:34.000Z | test/mix/ecto_test.exs | larryweya/ecto | d0d1fd43f0f97856a119184163167a7e79574923 | [
"Apache-2.0"
] | 2 | 2020-04-23T00:19:07.000Z | 2020-04-23T00:24:25.000Z | test/mix/ecto_test.exs | larryweya/ecto | d0d1fd43f0f97856a119184163167a7e79574923 | [
"Apache-2.0"
defmodule Mix.EctoTest do
  use ExUnit.Case, async: true

  import Mix.Ecto

  test "parse repo" do
    assert parse_repo(["-r", "Repo"]) == [Repo]
    assert parse_repo(["--repo", Repo]) == [Repo]
    assert parse_repo(["-r", "Repo", "-r", "Repo2"]) == [Repo, Repo2]
    assert parse_repo(["-r", "Repo", "--quiet"]) == [Repo]
    # Fixed: was `assert parse_repo(...), [Repo, Repo2]` — the expected list
    # was passed as the assert *message* argument, so nothing was checked.
    assert parse_repo(["-r", "Repo", "-r", "Repo2", "--quiet"]) == [Repo, Repo2]

    # No -r flag and no :ecto_repos config: empty result (emits a warning).
    assert parse_repo([]) == []

    # With :ecto_repos configured, configured repos are returned (no warning).
    Application.put_env(:ecto, :ecto_repos, [Foo.Repo])
    assert parse_repo([]) == [Foo.Repo]
  after
    Application.delete_env(:ecto, :ecto_repos)
  end

  # Minimal repo double exposing what ensure_repo/2 checks for.
  defmodule Repo do
    def __adapter__ do
      Ecto.TestAdapter
    end

    def config do
      [priv: Process.get(:priv), otp_app: :ecto]
    end
  end

  test "ensure repo" do
    assert ensure_repo(Repo, []) == Repo
    assert_raise Mix.Error, fn -> ensure_repo(String, []) end
    assert_raise Mix.Error, fn -> ensure_repo(NotLoaded, []) end
  end
end
| 25.973684 | 78 | 0.605876 |
03d887619b710952725a68aa56c05b2d64714fbd | 1,418 | ex | Elixir | clients/books/lib/google_api/books/v1/model/usersettings_notification_price_drop.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/books/lib/google_api/books/v1/model/usersettings_notification_price_drop.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/usersettings_notification_price_drop.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Books.V1.Model.UsersettingsNotificationPriceDrop do
  @moduledoc """
  Auto-generated model wrapping a single price-drop notification setting.

  ## Attributes

  *   `opted_state` (*type:* `String.t`, *default:* `nil`) -
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :opted_state => String.t()
        }

  field(:opted_state)
end

# Decode via the generated model's field definitions.
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.UsersettingsNotificationPriceDrop do
  def decode(value, options) do
    GoogleApi.Books.V1.Model.UsersettingsNotificationPriceDrop.decode(value, options)
  end
end

# Encode via the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.UsersettingsNotificationPriceDrop do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30.170213 | 90 | 0.745416 |
03d88a0d4d6933072571aa37bc0132ff0bf1a198 | 2,330 | exs | Elixir | benchmarks/intermediate_algo.exs | soyjeansoy/freecodecamp-exercises-using-elixir | 62846dc9017aa23163039b3fc38c3bec26fcb962 | [
"MIT"
] | null | null | null | benchmarks/intermediate_algo.exs | soyjeansoy/freecodecamp-exercises-using-elixir | 62846dc9017aa23163039b3fc38c3bec26fcb962 | [
"MIT"
] | null | null | null | benchmarks/intermediate_algo.exs | soyjeansoy/freecodecamp-exercises-using-elixir | 62846dc9017aa23163039b3fc38c3bec26fcb962 | [
"MIT"
] | null | null | null | defmodule Benchmark.IntermediateAlgo do
alias Freecodecamp.IntermediateAlgo
  # Dispatches a benchmark by name to the matching function in this module.
  # NOTE(review): String.to_atom/1 on the supplied name creates atoms
  # dynamically — acceptable for a local benchmark script, not for untrusted input.
  @spec run(String.t(), module(), any()) :: module()
  def run(function_name, formatter, args \\ nil),
    do: __MODULE__ |> apply(String.to_atom(function_name), [formatter, args])

  # set config for benchmark here
  # Shared Benchee configuration; `before_each_function` builds fresh inputs
  # for every invocation so generation cost is excluded from measurements.
  defp generic_benchee(jobs, formatter, before_each_function) do
    Benchee.run(
      jobs,
      before_each: before_each_function,
      time: 5,
      memory_time: 2,
      warmup: 4,
      formatters: [formatter]
    )
  end

  # Benchmarks four implementations of "sum all integers between two bounds"
  # against random integer pairs in -10_000..10_000.
  @spec sum_all(module(), any()) :: module()
  def sum_all(formatter, _args) do
    generic_benchee(
      %{
        "IntermediateAlgo.sum_all" => fn {int1, int2} ->
          IntermediateAlgo.sum_all([int1, int2])
        end,
        "Enum.reduce" => fn {int1, int2} ->
          sum_all_gen([int1, int2])
        end,
        "Enum.sum" => fn {int1, int2} ->
          sum_all_gen_v2([int1, int2])
        end,
        "list comprehension" => fn {int1, int2} ->
          sum_all_gen_v3([int1, int2])
        end
      },
      formatter,
      fn _ ->
        {
          gen_int_input(10_000),
          gen_int_input(10_000)
        }
      end
    )
  end
##################################################
### Below are helpers for the main functions above
##################################################
@spec gen_int_input(integer) :: integer()
defp gen_int_input(number) do
StreamData.integer(-number..number)
|> Enum.take(number)
|> Enum.random()
end
@spec sum_all_gen(list(integer)) :: integer
defp sum_all_gen([num_one, num_two] = _list) do
Enum.reduce(num_one..num_two, &(&1 + &2))
end
@spec sum_all_gen_v2(list(integer)) :: integer
defp sum_all_gen_v2([num_one, num_two] = _list) do
Enum.sum(num_one..num_two)
end
@spec sum_all_gen_v3(list(integer)) :: integer
def sum_all_gen_v3([0, 0]), do: 0
def sum_all_gen_v3([num_one, num_two] = _list) do
for(x <- num_one..num_two, do: x)
|> do_sum_all_v3()
end
defp do_sum_all_v3([]), do: 0
defp do_sum_all_v3([h | t] = _list), do: h + do_sum_all_v3(t)
end
alias Benchmark.IntermediateAlgo
alias Benchee.Formatters.{HTML, Console}

# Script entry point: runs the `sum_all` suite with console output.
# Swap the formatter / function name to produce HTML reports instead.
# IntermediateAlgo.run("mutation", HTML)
IntermediateAlgo.run("sum_all", Console)

# Available functions (uncomment above):
# - sum_all
| 26.477273 | 77 | 0.599142 |
03d8c7aa3abba810ba7b5343cbce4012a2fc5e73 | 10,965 | ex | Elixir | apps/engine/test/support/db/factory.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/engine/test/support/db/factory.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/engine/test/support/db/factory.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule Engine.DB.Factory do
@moduledoc """
Factories for our Ecto Schemas.
"""
use ExMachina.Ecto, repo: Engine.Repo
alias Engine.DB.Block
alias Engine.DB.Fee
alias Engine.DB.Output
alias Engine.DB.Transaction
alias Engine.DB.TransactionFee
alias Engine.Ethereum.RootChain.Event
alias Engine.Support.TestEntity
alias ExPlasma.Builder
alias ExPlasma.Output.Position
alias ExPlasma.Transaction, as: ExPlasmaTx
alias ExPlasma.Transaction.Type.Fee, as: ExPlasmaFee
@eth <<0::160>>
  # Event for an in-flight exit output piggyback; attr may override
  # :tx_hash and :output_index.
  def output_piggyback_event_factory(attr \\ %{}) do
    tx_hash = Map.get(attr, :tx_hash, <<1::256>>)
    index = Map.get(attr, :output_index, 0)

    params =
      attr
      |> Map.put(:signature, "InFlightExitOutputPiggybacked(address,bytes32,uint16)")
      |> Map.put(:data, %{
        "tx_hash" => tx_hash,
        "output_index" => index
      })

    build(:event, params)
  end

  # Event for an in-flight exit start; attr may override :initiator,
  # :tx_hash and :positions (input utxo positions).
  def in_flight_exit_started_event_factory(attr \\ %{}) do
    params =
      attr
      |> Map.put(:signature, "InFlightExitStarted(address,bytes32)")
      |> Map.put(:data, %{
        "initiator" => Map.get(attr, :initiator, <<1::160>>),
        "tx_hash" => Map.get(attr, :tx_hash, <<1::256>>),
        "input_utxos_pos" => Map.get(attr, :positions, [1_000_000_000])
      })

    build(:event, params)
  end

  # Event for a standard exit start at the given utxo :position.
  def exit_started_event_factory(attr \\ %{}) do
    position = Map.get(attr, :position, 1_000_000_000)

    params =
      attr
      |> Map.put(:signature, "ExitStarted(address,uint160)")
      |> Map.put(:data, %{
        "utxo_pos" => position
      })

    build(:event, params)
  end

  # Event for a root-chain deposit; attr may override :amount, :blknum,
  # :token and :depositor.
  def deposit_event_factory(attr \\ %{}) do
    params =
      attr
      |> Map.put(:signature, "DepositCreated(address,uint256,address,uint256)")
      |> Map.put(:data, %{
        "amount" => Map.get(attr, :amount, 1),
        "blknum" => Map.get(attr, :blknum, 1),
        "token" => Map.get(attr, :token, @eth),
        "depositor" => Map.get(attr, :depositor, <<1::160>>)
      })

    build(:event, params)
  end
def event_factory(attr \\ %{}) do
signature = Map.get(attr, :signature, "FooCalled()")
data = Map.get(attr, :data, attr)
height = Map.get(attr, :height, 100)
log_index = Map.get(attr, :log_index, 1)
root_chain_tx_hash = Map.get(attr, :log_index, <<1::160>>)
%Event{
data: data,
eth_height: height,
event_signature: signature,
log_index: log_index,
root_chain_tx_hash: root_chain_tx_hash
}
end
  # Builds a :confirmed deposit Output (payment v1) positioned at
  # {blknum, 0, 0}; attr may override :blknum, :output_guard, :amount, :token.
  def deposit_output_factory(attr \\ %{}) do
    entity = TestEntity.alice()

    # Each call gets a fresh block number unless one is supplied.
    default_blknum = sequence(:deposit_output_blknum, fn seq -> seq + 1 end)

    blknum = Map.get(attr, :blknum, default_blknum)
    output_guard = Map.get(attr, :output_guard, entity.addr)
    amount = Map.get(attr, :amount, 1)
    token = Map.get(attr, :token, @eth)

    {:ok, encoded_output_data} =
      %ExPlasma.Output{}
      |> struct(%{
        output_type: ExPlasma.payment_v1(),
        output_data: %{
          output_guard: output_guard,
          token: token,
          amount: amount
        }
      })
      |> ExPlasma.Output.encode()

    # Deposits always sit at txindex 0, oindex 0 of their block.
    output_id = Position.new(blknum, 0, 0)

    {:ok, encoded_output_id} =
      %ExPlasma.Output{}
      |> struct(%{output_id: output_id})
      |> ExPlasma.Output.encode(as: :input)

    %Output{
      state: :confirmed,
      output_type: ExPlasma.payment_v1(),
      output_data: encoded_output_data,
      output_id: encoded_output_id,
      position: output_id.position,
      blknum: blknum
    }
  end
  def payment_v1_transaction_factory(attr \\ %{})

  # Multi-input/output clause: builds a signed payment-v1 transaction from
  # inputs/outputs given as [%{amount: _, token: _}], attached to `block` at
  # `tx_index`. Inputs are freshly inserted deposit outputs, all signed by alice.
  def payment_v1_transaction_factory(%{inputs: inputs, outputs: outputs, block: block, tx_index: tx_index}) do
    entity = TestEntity.alice()

    {input_ids, inputs} =
      inputs
      |> Enum.map(fn %{amount: amount, token: token} ->
        %{output_id: output_id} = input = insert(:deposit_output, %{amount: amount, token: token})
        decoded_id = ExPlasma.Output.decode_id!(output_id)
        {decoded_id, input}
      end)
      |> Enum.unzip()

    {decoded_outputs, outputs} =
      outputs
      |> Enum.map(fn %{amount: amount, token: token} ->
        %{output_data: output_data} =
          output = insert(:output, %{amount: amount, token: token, output_guard: entity.addr})

        decoded_output = ExPlasma.Output.decode!(output_data)
        {decoded_output, output}
      end)
      |> Enum.unzip()

    # One signature per input, all from the same test entity.
    tx_bytes =
      ExPlasma.payment_v1()
      |> Builder.new(%{inputs: input_ids, outputs: decoded_outputs})
      |> Builder.sign!(List.duplicate(entity.priv_encoded, Enum.count(inputs)))
      |> ExPlasma.encode!()

    {:ok, tx_hash} = ExPlasma.Transaction.hash(tx_bytes)

    %Transaction{
      inputs: inputs,
      outputs: outputs,
      tx_bytes: tx_bytes,
      tx_hash: tx_hash,
      tx_type: ExPlasma.payment_v1(),
      block: block,
      tx_index: tx_index,
      inserted_at: DateTime.truncate(DateTime.utc_now(), :second),
      updated_at: DateTime.truncate(DateTime.utc_now(), :second)
    }
  end

  # Default clause: one spent deposit input, one output, signed by alice.
  # attr may override :inputs, :outputs, :tx_bytes, :block and :tx_index.
  # NOTE(review): the default input/output are built (and persisted) even when
  # attr supplies :inputs/:outputs — only the tx_bytes derivation skips them.
  def payment_v1_transaction_factory(attr) do
    %{output_id: output_id} = input = :deposit_output |> build() |> set_state(:spent)
    %{output_data: output_data} = output = build(:output)

    tx_bytes =
      case attr[:tx_bytes] do
        nil ->
          entity = TestEntity.alice()

          ExPlasma.payment_v1()
          |> Builder.new(%{
            inputs: [ExPlasma.Output.decode_id!(output_id)],
            outputs: [ExPlasma.Output.decode!(output_data)]
          })
          |> Builder.sign!([entity.priv_encoded])
          |> ExPlasma.encode!()

        bytes ->
          bytes
      end

    {:ok, tx_hash} = ExPlasma.Transaction.hash(tx_bytes)

    %Transaction{
      inputs: Map.get(attr, :inputs, [input]),
      outputs: Map.get(attr, :outputs, [output]),
      tx_bytes: Map.get(attr, :tx_bytes, tx_bytes),
      tx_hash: tx_hash,
      tx_type: ExPlasma.payment_v1(),
      block: Map.get(attr, :block),
      tx_index: Map.get(attr, :tx_index, 0),
      inserted_at: DateTime.truncate(DateTime.utc_now(), :second),
      updated_at: DateTime.truncate(DateTime.utc_now(), :second)
    }
  end
# Builds a fee `%Transaction{}` for the given attributes.
#
# Required keys in `attr`: `:owner`, `:token`, `:amount`, `:block`,
# `:tx_index`.
def fee_transaction_factory(attr) do
  owner = Map.fetch!(attr, :owner)
  token = Map.fetch!(attr, :token)
  amount = Map.fetch!(attr, :amount)
  block = Map.fetch!(attr, :block)

  plasma_output = ExPlasmaFee.new_output(owner, token, amount)

  # Fee transactions are nonce-bound to the block number and fee token.
  {:ok, fee_tx} =
    ExPlasma.fee()
    |> Builder.new(outputs: [plasma_output])
    |> ExPlasmaTx.with_nonce(%{blknum: block.blknum, token: token})

  output = insert(:output, %{amount: amount, token: token, output_guard: owner})
  tx_bytes = ExPlasma.encode!(fee_tx, signed: true)
  {:ok, tx_hash} = ExPlasma.Transaction.hash(tx_bytes)

  %Transaction{
    outputs: [output],
    tx_bytes: tx_bytes,
    tx_hash: tx_hash,
    tx_type: ExPlasma.fee(),
    block: block,
    tx_index: Map.fetch!(attr, :tx_index),
    inserted_at: DateTime.truncate(DateTime.utc_now(), :second),
    updated_at: DateTime.truncate(DateTime.utc_now(), :second)
  }
end
# The "lowest" unit in the hierarchy. This is made to form into transactions.
#
# Builds a pending `%Output{}`. Position components (`:blknum`, `:txindex`,
# `:oindex`), payload fields (`:output_guard`, `:token`, `:amount`), and
# `:output_type`/`:output_data`/`:output_id` can all be overridden in `attr`.
#
# Fix: the `blknum` field previously always used the sequence default even
# when the caller passed `:blknum`, so the stored `blknum` could disagree
# with the blknum encoded inside the output's position.
def output_factory(attr \\ %{}) do
  # Output payload (owner, token, amount), overridable per attribute.
  default_data = %{
    output_guard: Map.get(attr, :output_guard, <<1::160>>),
    token: Map.get(attr, :token, @eth),
    amount: Map.get(attr, :amount, 10)
  }

  # Unique position defaults so factory-built outputs never collide.
  default_blknum = sequence(:output_blknum, fn seq -> (seq + 1) * 1000 end)
  default_txindex = sequence(:output_txindex, fn seq -> seq + 1 end)
  default_oindex = sequence(:output_oindex, fn seq -> seq + 1 end)

  # Honor caller-provided position components; fall back to the sequences.
  blknum = Map.get(attr, :blknum, default_blknum)
  txindex = Map.get(attr, :txindex, default_txindex)
  oindex = Map.get(attr, :oindex, default_oindex)

  default_output_id = Position.new(blknum, txindex, oindex)

  {:ok, encoded_output_data} =
    %ExPlasma.Output{}
    |> struct(%{
      output_type: Map.get(attr, :output_type, 1),
      output_data: Map.get(attr, :output_data, default_data)
    })
    |> ExPlasma.Output.encode()

  {:ok, encoded_output_id} =
    %ExPlasma.Output{}
    |> struct(%{output_id: Map.get(attr, :output_id, default_output_id)})
    |> ExPlasma.Output.encode(as: :input)

  %Output{
    state: :pending,
    output_type: ExPlasma.payment_v1(),
    output_data: encoded_output_data,
    output_id: encoded_output_id,
    position: default_output_id.position,
    # Keep the stored blknum consistent with the encoded position.
    blknum: blknum
  }
end
# Builds a forming `%Block{}` with a unique nonce; blknum follows the
# childchain convention of (nonce + 1) * 1000.
def block_factory() do
  nonce = sequence(:block_nonce, fn seq -> seq + 1 end)

  %Block{
    hash: :crypto.strong_rand_bytes(32),
    nonce: nonce,
    blknum: (nonce + 1) * 1000,
    state: :forming,
    tx_hash: :crypto.strong_rand_bytes(64),
    formed_at_ethereum_height: 1,
    submitted_at_ethereum_height: 1,
    attempts_counter: 0,
    transactions: [],
    gas: 827
  }
end
# Builds a `%Fee{}` of type `:merged_fees`: accepted fee amounts grouped
# by transaction type and then by fee token (20-byte currency address).
def merged_fee_factory() do
  eth = Base.decode16!("0000000000000000000000000000000000000000")
  other_token = Base.decode16!("0000000000000000000000000000000000000001")

  fee_terms = %{
    1 => %{
      eth => [1, 2],
      other_token => [1]
    },
    2 => %{eth => [1]}
  }

  # Hash identifies this exact fee specification.
  digest = :crypto.hash(:sha256, inspect(fee_terms))

  %Fee{
    type: :merged_fees,
    term: fee_terms,
    hash: Base.encode16(digest, case: :lower),
    inserted_at: DateTime.utc_now()
  }
end
# Builds a `%Fee{}` of type `:current_fees`: full fee specs (amount plus
# fiat-pegging metadata) grouped by transaction type and fee token.
def current_fee_factory() do
  fees = %{
    1 => %{
      Base.decode16!("0000000000000000000000000000000000000000") => %{
        amount: 1,
        subunit_to_unit: 1_000_000_000_000_000_000,
        pegged_amount: 1,
        pegged_currency: "USD",
        pegged_subunit_to_unit: 100,
        updated_at: DateTime.from_unix!(1_546_336_800)
      },
      Base.decode16!("0000000000000000000000000000000000000001") => %{
        amount: 2,
        subunit_to_unit: 1_000_000_000_000_000_000,
        pegged_amount: 1,
        pegged_currency: "USD",
        pegged_subunit_to_unit: 100,
        updated_at: DateTime.from_unix!(1_546_336_800)
      }
    },
    2 => %{
      Base.decode16!("0000000000000000000000000000000000000000") => %{
        amount: 2,
        subunit_to_unit: 1_000_000_000_000_000_000,
        pegged_amount: 1,
        pegged_currency: "USD",
        pegged_subunit_to_unit: 100,
        updated_at: DateTime.from_unix!(1_546_336_800)
      }
    }
  }

  # Hash identifies this exact fee specification.
  hash =
    :sha256
    |> :crypto.hash(inspect(fees))
    |> Base.encode16(case: :lower)

  %Fee{
    type: :current_fees,
    term: fees,
    hash: hash,
    inserted_at: DateTime.utc_now()
  }
end
# Builds a `%TransactionFee{}`; `:transaction`, `:amount` and `:currency`
# are all required in `attr` (Map.fetch!/2 raises when missing).
def transaction_fee_factory(attr) do
  %TransactionFee{
    transaction: Map.fetch!(attr, :transaction),
    amount: Map.fetch!(attr, :amount),
    currency: Map.fetch!(attr, :currency)
  }
end
defp set_state(%Output{} = output, state), do: %{output | state: state}
end
| 28.931398 | 110 | 0.614592 |
03d8e1d1b633fb9f9cb44319fb9a414fafdecc94 | 669 | exs | Elixir | projects/layabout/mix.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2020-02-11T06:00:11.000Z | 2020-02-11T06:00:11.000Z | projects/layabout/mix.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2017-09-23T19:41:29.000Z | 2017-09-25T05:12:38.000Z | projects/layabout/mix.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | null | null | null | defmodule Layabout.Mixfile do
use Mix.Project
# Mix project definition for the :layabout application.
def project do
  [app: :layabout,
   version: "0.1.0",
   elixir: "~> 1.4",
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
# OTP application callback: runtime dependencies plus the entry module.
def application do
  # Specify extra applications you'll use from Erlang/Elixir
  [extra_applications: [:logger, :slack, :cowboy, :plug],
   mod: {Layabout, []}]
end
# Dependencies: Slack RTM client, time handling (timex), and a
# Cowboy/Plug HTTP stack with Poison for JSON.
defp deps do
  [{:slack, "~> 0.9"},
   {:timex, "~> 3.1.7"},
   {:cowboy, "~> 1.0.0"},
   {:plug, "~> 1.0"},
   {:poison, "~> 3.0"}]
end
end
| 22.3 | 62 | 0.572496 |
03d8ee404ec7931490632ddca1b8bd77344785f2 | 3,997 | exs | Elixir | test/components/form/search_input_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | 1 | 2020-12-29T10:43:19.000Z | 2020-12-29T10:43:19.000Z | test/components/form/search_input_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | test/components/form/search_input_test.exs | EddyLane/surface | 1f13259cbdf81b5a4740ee13349a48f8b6c54bb5 | [
"MIT"
] | null | null | null | defmodule Surface.Components.Form.SearchInputTest do
use Surface.ConnCase, async: true
alias Surface.Components.Form.SearchInput
test "empty input" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="song" field="title" />
      """
    end

  # id/name are derived from form + field; no value attribute is emitted.
  assert html =~ """
         <input id="song_title" name="song[title]" type="search">
         """
end
test "setting the value" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="song" field="title" value="mytitle" />
      """
    end

  # The value prop is rendered as the input's value attribute.
  assert html =~ """
         <input id="song_title" name="song[title]" type="search" value="mytitle">
         """
end
test "setting the class" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="song" field="title" class="input" />
      """
    end

  # A single class prop is passed straight through.
  assert html =~ ~r/class="input"/
end
test "setting multiple classes" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="song" field="title" class="input primary" />
      """
    end

  # Multiple space-separated classes are preserved verbatim.
  assert html =~ ~r/class="input primary"/
end
test "passing other options" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="song" field="title" opts={{ autofocus: "autofocus" }} />
      """
    end

  # Arbitrary opts are forwarded as extra HTML attributes.
  assert html =~ """
         <input autofocus="autofocus" id="song_title" name="song[title]" type="search">
         """
end
test "blur event with parent live view as target" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="color" value="mytitle" blur="my_blur" />
      """
    end

  # The blur prop becomes a phx-blur binding.
  assert html =~ """
         <input id="user_color" name="user[color]" phx-blur="my_blur" type="search" value="mytitle">
         """
end
test "focus event with parent live view as target" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="color" value="mytitle" focus="my_focus" />
      """
    end

  # The focus prop becomes a phx-focus binding.
  assert html =~ """
         <input id="user_color" name="user[color]" phx-focus="my_focus" type="search" value="mytitle">
         """
end
test "capture click event with parent live view as target" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="color" value="mytitle" capture_click="my_click" />
      """
    end

  # The capture_click prop becomes a phx-capture-click binding.
  assert html =~ """
         <input id="user_color" name="user[color]" phx-capture-click="my_click" type="search" value="mytitle">
         """
end
test "keydown event with parent live view as target" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="color" value="mytitle" keydown="my_keydown" />
      """
    end

  # The keydown prop becomes a phx-keydown binding.
  assert html =~ """
         <input id="user_color" name="user[color]" phx-keydown="my_keydown" type="search" value="mytitle">
         """
end
test "keyup event with parent live view as target" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="color" value="mytitle" keyup="my_keyup" />
      """
    end

  # The keyup prop becomes a phx-keyup binding.
  assert html =~ """
         <input id="user_color" name="user[color]" phx-keyup="my_keyup" type="search" value="mytitle">
         """
end
test "setting id and name through props" do
  html =
    render_surface do
      ~H"""
      <SearchInput form="user" field="title" id="mytitle" name="mytitle" />
      """
    end

  # Explicit id/name props override the form/field-derived defaults.
  assert html =~ """
         <input id="mytitle" name="mytitle" type="search">
         """
end
end
defmodule Surface.Components.Form.SearchInputConfigTest do
  use Surface.ConnCase

  alias Surface.Components.Form.SearchInput

  test ":default_class config" do
    # A globally configured :default_class is rendered when the component
    # itself sets no class attribute.
    using_config SearchInput, default_class: "default_class" do
      html =
        render_surface do
          ~H"""
          <SearchInput/>
          """
        end

      assert html =~ ~r/class="default_class"/
    end
  end
end
| 24.371951 | 112 | 0.56017 |
03d90c39a85c84ec7fc382e9d2d2f958ab96c0c8 | 76 | exs | Elixir | server/test/views/layout_view_test.exs | CircleAcademy/circle-website | b519e1e7c1d90566b7dfdaeda20ddd71abf6c832 | [
"MIT"
] | null | null | null | server/test/views/layout_view_test.exs | CircleAcademy/circle-website | b519e1e7c1d90566b7dfdaeda20ddd71abf6c832 | [
"MIT"
] | null | null | null | server/test/views/layout_view_test.exs | CircleAcademy/circle-website | b519e1e7c1d90566b7dfdaeda20ddd71abf6c832 | [
"MIT"
] | null | null | null | defmodule Website.LayoutViewTest do
use Website.ConnCase, async: true
end
| 19 | 35 | 0.815789 |
03d91a621d4474cf13a5d4fc93e077c55e19bcc0 | 1,820 | exs | Elixir | apps/subs_web/mix.exs | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | null | null | null | apps/subs_web/mix.exs | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | null | null | null | apps/subs_web/mix.exs | gitter-badger/opensubs.io | 76d5b4d355a530c8f496efe3ac2095d87f078997 | [
"MIT"
] | null | null | null | defmodule SubsWeb.Mixfile do
use Mix.Project
# Umbrella child app configuration; build, config, deps and lockfile
# paths all point at the umbrella root.
def project do
  [
    app: :subs_web,
    version: "0.0.1",
    build_path: "../../_build",
    config_path: "../../config/config.exs",
    deps_path: "../../deps",
    lockfile: "../../mix.lock",
    elixir: "~> 1.5",
    elixirc_paths: elixirc_paths(Mix.env),
    compilers: [:phoenix, :gettext] ++ Mix.compilers,
    start_permanent: Mix.env == :prod,
    aliases: aliases(),
    deps: deps()
  ]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
# OTP application callback: entry module plus runtime applications.
def application do
  [
    mod: {SubsWeb.Application, []},
    extra_applications: [:logger, :runtime_tools, :elixir_make, :rollbax]
  ]
end
# Specifies which paths to compile per environment.
# Compile test support helpers only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
# Dependencies: the Phoenix web stack, sibling umbrella apps (:subs,
# :repository), auth (guardian), release tooling (distillery),
# browser testing (wallaby) and error reporting (rollbax).
defp deps do
  [
    {:phoenix, "~> 1.3.0"},
    {:phoenix_pubsub, "~> 1.0"},
    {:phoenix_ecto, "~> 3.2"},
    {:phoenix_html, "~> 2.10"},
    {:phoenix_live_reload, "~> 1.0", only: :dev},
    {:gettext, "~> 0.11"},
    {:subs, in_umbrella: true},
    {:repository, in_umbrella: true},
    {:cowboy, "~> 1.0"},
    {:guardian, "~> 1.0-beta"},
    {:distillery, "~> 1.5", runtime: false},
    {:wallaby, "~> 0.20.0", only: :test},
    {:rollbax, ">= 0.0.0"},
    {:jason, "~> 1.0"},
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, we extend the test task to create and migrate the database.
#
# See the documentation for `Mix` for more info on aliases.
# Running `mix test` first creates and migrates the test database.
defp aliases do
  ["test": ["ecto.create --quiet", "ecto.migrate", "test"]]
end
end
| 28 | 76 | 0.573626 |
03d93e4a852ee6c4d6d84240b4a3579bfefe1e5b | 5,047 | exs | Elixir | test/conduit_sqs/poller_test.exs | trbngr/conduit_sqs | d96056a0767765a67b418a978a19e54861fb2e71 | [
"MIT"
] | null | null | null | test/conduit_sqs/poller_test.exs | trbngr/conduit_sqs | d96056a0767765a67b418a978a19e54861fb2e71 | [
"MIT"
] | null | null | null | test/conduit_sqs/poller_test.exs | trbngr/conduit_sqs | d96056a0767765a67b418a978a19e54861fb2e71 | [
"MIT"
] | null | null | null | defmodule ConduitSQS.PollerTest do
use ExUnit.Case, async: true
import Injex.Test
import ExUnit.CaptureLog
alias ConduitSQS.Poller
alias Conduit.Message
describe "init/1" do
  test "sets itself as a producer and stores it's state" do
    queue = "conduitsqs-test"
    subscriber_opts = []
    adapter_opts = []

    # init/1 must register as a GenStage producer with accumulated demand
    # and keep all constructor args in its state.
    assert Poller.init([Broker, queue, subscriber_opts, adapter_opts]) == {
             :producer,
             %Poller.State{
               broker: Broker,
               queue: queue,
               subscriber_opts: subscriber_opts,
               adapter_opts: adapter_opts
             },
             [demand: :accumulate]
           }

    # init/1 also schedules an activation check to itself.
    assert_received :check_active
  end
end
describe "handle_demand/2" do
  test "when there is already demand, it adds the new demand to the current demand" do
    state = %Poller.State{demand: 1}

    assert Poller.handle_demand(2, state) == {:noreply, [], %Poller.State{demand: 3}}
    # No fetch is scheduled: a :get_messages is already pending.
    refute_received :get_messages
  end

  test "when there is no demand, it sets the new demand and schedules the poller" do
    state = %Poller.State{demand: 0}

    assert Poller.handle_demand(3, state) == {:noreply, [], %Poller.State{demand: 3}}
    assert_received :get_messages
  end
end
describe "handle_info/2 :get_messages" do
  # Stub SQS client that returns exactly as many messages as requested.
  defmodule SQSEqual do
    def get_messages(_queue, fetch_amount, _subscriber_opts, _adapter_opts) do
      Enum.map(1..fetch_amount, fn _ -> %Message{} end)
    end
  end

  test "when all demand is handled, it produces messages and updates demand" do
    override Poller, sqs: SQSEqual do
      state = %Poller.State{
        queue: "conduitsqs-test",
        subscriber_opts: [max_number_of_messages: 5],
        adapter_opts: [],
        demand: 5
      }

      # Demand (5) fits in one fetch, so demand drops to 0 and no new
      # fetch is scheduled.
      assert Poller.handle_info(:get_messages, state) == {
               :noreply,
               [%Message{}, %Message{}, %Message{}, %Message{}, %Message{}],
               %Poller.State{
                 queue: "conduitsqs-test",
                 subscriber_opts: [max_number_of_messages: 5],
                 adapter_opts: [],
                 demand: 0
               },
               :hibernate
             }

      refute_received :get_messages
    end
  end

  test "when demand equal to the fetch limit is handled, it produces messags, updates demand, and schedules immediately" do
    override Poller, sqs: SQSEqual do
      state = %Poller.State{
        queue: "conduitsqs-test",
        subscriber_opts: [max_number_of_messages: 5],
        adapter_opts: [],
        demand: 10
      }

      # The fetch returned a full batch (5 of 10); remaining demand
      # triggers an immediate re-poll.
      assert Poller.handle_info(:get_messages, state) == {
               :noreply,
               [%Message{}, %Message{}, %Message{}, %Message{}, %Message{}],
               %Poller.State{
                 queue: "conduitsqs-test",
                 subscriber_opts: [max_number_of_messages: 5],
                 adapter_opts: [],
                 demand: 5
               },
               :hibernate
             }

      assert_received :get_messages
    end
  end

  # Stub SQS client that returns fewer messages than requested.
  defmodule SQSLess do
    def get_messages(_queue, fetch_amount, _subscriber_opts, _adapter_opts) do
      Enum.map(1..(fetch_amount - 2), fn _ -> %Message{} end)
    end
  end

  test "when demand less than the fetch limit is handled, it produces messags, updates demand, and schedules later" do
    override Poller, sqs: SQSLess do
      state = %Poller.State{
        queue: "conduitsqs-test",
        subscriber_opts: [max_number_of_messages: 5],
        adapter_opts: [],
        demand: 10
      }

      # A short batch (3 of 5 requested) signals an empty-ish queue, so
      # the next poll is delayed rather than immediate.
      assert Poller.handle_info(:get_messages, state) == {
               :noreply,
               [%Message{}, %Message{}, %Message{}],
               %Poller.State{
                 queue: "conduitsqs-test",
                 subscriber_opts: [max_number_of_messages: 5],
                 adapter_opts: [],
                 demand: 7
               },
               :hibernate
             }

      assert_receive :get_messages, 300
    end
  end
end
describe "handle_info/2 :check_active" do
  # Stub meta module reporting that pollers should run.
  defmodule MetaActive do
    def pollers_active?(_broker) do
      true
    end
  end

  test "when pollers should be active" do
    override Poller, meta: MetaActive do
      result = assert capture_log(fn ->
        Poller.handle_info(:check_active, %Poller.State{queue: "foo"})
      end)

      assert result =~ "Starting poller for queue \"foo\" in \"default region\""
      # Activation forwards the accumulated GenStage demand.
      assert_received {:"$gen_cast", {:"$demand", :forward}}
    end
  end

  # Stub meta module reporting that pollers should stay idle.
  defmodule MetaInactive do
    def pollers_active?(_broker) do
      false
    end
  end

  test "when pollers should not be active" do
    override Poller, meta: MetaInactive do
      Poller.handle_info(:check_active, %Poller.State{})
      # Inactive pollers re-check after a short delay.
      assert_receive :check_active, 40
    end
  end
end
end
| 30.221557 | 125 | 0.564494 |
03d975cf991730d61dbdc5bd72a699f5ec4cd793 | 354 | exs | Elixir | priv/repo/seeds.exs | guofei/embedchat | 6562108acd1d488dde457f28cf01d82b4c5a9bf8 | [
"MIT"
] | 27 | 2016-10-15T12:13:22.000Z | 2021-02-07T20:31:41.000Z | priv/repo/seeds.exs | guofei/embedchat | 6562108acd1d488dde457f28cf01d82b4c5a9bf8 | [
"MIT"
] | null | null | null | priv/repo/seeds.exs | guofei/embedchat | 6562108acd1d488dde457f28cf01d82b4c5a9bf8 | [
"MIT"
] | 4 | 2016-08-21T15:03:29.000Z | 2019-11-22T13:15:29.000Z | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# EmbedChat.Repo.insert!(%EmbedChat.SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.5 | 61 | 0.70904 |
03d9bf94802df88924df3a614077de11291ac0ca | 3,014 | ex | Elixir | web/controllers/git_controller.ex | adanselm/exgitd | db693f3507a09788ea1f45901b15233e650ccbaa | [
"MIT"
] | 3 | 2015-01-13T18:38:37.000Z | 2017-10-27T17:04:09.000Z | web/controllers/git_controller.ex | adanselm/exgitd | db693f3507a09788ea1f45901b15233e650ccbaa | [
"MIT"
] | null | null | null | web/controllers/git_controller.ex | adanselm/exgitd | db693f3507a09788ea1f45901b15233e650ccbaa | [
"MIT"
] | null | null | null | defmodule Exgitd.GitController do
use Phoenix.Controller
alias Exgitd.GitPort
plug :action
## Create full repo path on server from username and repo name.
## Appends ".git" to the repo name when missing.
#
# Fix: the original used `unless ..., do: repo_name = repo_name <> ".git"`;
# in modern Elixir a rebinding inside `unless` is scoped to that block and
# never reaches the `Path.join` below, so the suffix was silently dropped.
defp make_path(user, repo_name) do
  pathroot = Application.get_env(:exgitd, :repositories_root)

  repo_name =
    if String.ends_with?(repo_name, ".git") do
      repo_name
    else
      repo_name <> ".git"
    end

  Path.join [pathroot, user, repo_name]
end
# Root directory holding all of `user`'s repositories.
defp make_path(user) do
  pathroot = Application.get_env(:exgitd, :repositories_root)
  Path.join [pathroot, user]
end
# Lists the repositories owned by `user`, one directory name per line.
def index(conn, %{"user" => user}) do
  {:ok, entries} = user |> make_path() |> File.ls()

  body = Enum.map_join(entries, "", fn entry -> entry <> "\n" end)

  send_packet(conn, "text/plain", body)
end
# Creates a new bare repository named `repo` for `user` and replies with
# the server-side path it was created at.
def create(conn, %{"repo" => repo, "user" => user}) do
  full_path = make_path(user, repo)
  GitPort.create_bare(full_path)
  send_packet conn, "text/plain", full_path
end
# GET /info/refs for a push (git-receive-pack): advertises refs in git's
# smart-HTTP format, prefixed by the mandatory pkt-line service header.
def get_info_refs(conn, %{"service" => "git-receive-pack", "repo" => repo, "user" => user}) do
  packet = pkt_line("# service=git-receive-pack\n")
  packet = packet <> GitPort.receive_pack(make_path(user, repo))
  #IO.puts to_string(packet)
  send_packet conn, "application/x-git-receive-pack-advertisement", packet
end
# GET /info/refs for a fetch/clone (git-upload-pack): advertises refs in
# git's smart-HTTP format, prefixed by the mandatory pkt-line service header.
def get_info_refs(conn, %{"service" => "git-upload-pack", "repo" => repo, "user" => user}) do
  packet = pkt_line("# service=git-upload-pack\n")
  packet = packet <> GitPort.upload_pack(make_path(user, repo))
  #IO.puts to_string(packet)
  send_packet conn, "application/x-git-upload-pack-advertisement", packet
end
# Encodes `line` in git's pkt-line format and appends a flush-pkt ("0000").
# The 4-hex-digit length prefix counts *bytes* (including the prefix
# itself), so byte_size/1 is required rather than the grapheme-counting
# String.length/1 the original used; also replaces the removed
# String.rjust/3 with String.pad_leading/3.
defp pkt_line(line) do
  prefix =
    (byte_size(line) + 4)
    |> Integer.to_string(16)
    |> String.downcase()
    |> String.pad_leading(4, "0")

  "#{prefix}#{line}0000"
end
# POST /git-receive-pack: forwards the client's pack data (a push) to git
# and returns the result stream.
def post_receive_pack(conn, %{"repo" => repo, "user" => user}) do
  data = read_long_body(conn)
  packet = GitPort.post_receive_pack(make_path(user, repo), data)
  #IO.inspect packet
  send_packet conn, "application/x-git-receive-pack-result", packet
end
# POST /git-upload-pack: forwards the client's want/have negotiation (a
# fetch/clone) to git and returns the result stream.
def post_upload_pack(conn, %{"repo" => repo, "user" => user}) do
  data = read_long_body(conn)
  packet = GitPort.post_upload_pack(make_path(user, repo), data)
  #IO.inspect packet
  send_packet conn, "application/x-git-upload-pack-result", packet
end
# Sends `data` with the no-cache headers git clients require.
# Can't use Phoenix shortcuts because we need to control the
# content type and charset (put_resp_content_type/3 with a nil charset).
#
# Fix: Plug requires lowercase response header names and raises
# Plug.Conn.InvalidHeaderError on mixed-case keys; HTTP header names are
# case-insensitive on the wire, so lowercasing is behavior-compatible.
defp send_packet(conn, content_type, data) do
  conn
  |> put_resp_header("expires", "Fri, 01 Jan 1980 00:00:00 GMT")
  |> put_resp_header("cache-control", "no-cache, max-age=0, must-revalidate")
  |> put_resp_header("pragma", "no-cache")
  |> put_resp_content_type(content_type, nil)
  |> send_resp(200, data)
end
# Reads a request body of arbitrary size by draining Plug's chunked reads.
defp read_long_body(conn) do
  read_long_body conn, ""
end
# Accumulates body chunks until read_body/1 reports the body fully read.
defp read_long_body(conn, acc) do
  case read_body(conn) do
    {:ok, data, _rest} -> acc <> data
    # :more returns the updated conn; keep reading from it.
    {:more, partial, rest} -> read_long_body(rest, acc <> partial)
  end
end
end
| 32.408602 | 96 | 0.674187 |
03d9d34101ea1c91b21122d2086c5fa5c5108015 | 284 | ex | Elixir | lib/string/chars/vivid/frame.ex | jamesotron/vivid | 01a0089384d8daced8f22a97c49e657fc245fb3c | [
"MIT"
] | 26 | 2017-01-03T01:18:38.000Z | 2019-02-21T09:26:20.000Z | lib/string/chars/vivid/frame.ex | jamesotron/vivid | 01a0089384d8daced8f22a97c49e657fc245fb3c | [
"MIT"
] | null | null | null | lib/string/chars/vivid/frame.ex | jamesotron/vivid | 01a0089384d8daced8f22a97c49e657fc245fb3c | [
"MIT"
] | 2 | 2017-03-18T22:45:49.000Z | 2017-05-08T07:24:58.000Z | defimpl String.Chars, for: Vivid.Frame do
alias Vivid.Frame
@doc """
Convert a `frame` into a `string` for `IO.puts`, etc.

Delegates to `Vivid.Frame.buffer/1` and stringifies the result.
"""
@spec to_string(Frame.t()) :: String.t()
def to_string(%Frame{} = frame) do
  frame
  |> Frame.buffer()
  |> Kernel.to_string()
end
end
| 20.285714 | 55 | 0.612676 |
03d9dabcbaab55b3be805ac0c4c92dc14e64fa39 | 2,009 | ex | Elixir | lib/ffaker/en/company.ex | marocchino/ffaker | 5bb4420a8034db9f954a2464cd72039129ef2307 | [
"MIT"
] | 9 | 2016-11-09T15:50:05.000Z | 2021-02-02T05:45:06.000Z | lib/ffaker/en/company.ex | marocchino/ffaker | 5bb4420a8034db9f954a2464cd72039129ef2307 | [
"MIT"
] | 3 | 2016-09-01T09:54:46.000Z | 2017-07-25T05:23:56.000Z | lib/ffaker/en/company.ex | marocchino/ffaker | 5bb4420a8034db9f954a2464cd72039129ef2307 | [
"MIT"
] | 4 | 2016-11-22T17:51:48.000Z | 2021-02-02T05:45:01.000Z | defmodule Ffaker.En.Company do
@moduledoc"""
Functions for company data in English
"""
use Ffaker
alias Ffaker.En.Name
@suffixes ~w(Inc LLC Group)
@position_prefixes ~w(Executive Assistant General Associate)
@positions ~w(President Manager Director Secretary Consultant)
@doc"""
Returns a random company name in one of three shapes: "Last Suffix",
"Last-Last" or "Last, Last and Last". (Example output is illustrative;
the result is random.)

## Examples

    iex> Ffaker.En.Company.name
    "Carroll, Reinger and Fritsch "
"""
@spec name() :: String.t
def name do
  first = Name.last_name
  second = Name.last_name
  last = Name.last_name

  ["#{first} #{suffix()}",
   "#{first}-#{second}",
   "#{first}, #{second} and #{last}"]
  |> Enum.random
end
@doc"""
Returns a random company suffix ("Inc", "LLC" or "Group").

## Examples

    iex> Ffaker.En.Company.suffix
    "Inc"
"""
@spec suffix() :: String.t
def suffix do
  Enum.random(@suffixes)
end
@doc"""
Returns a random company catch phrase, built from one word of each of
the catch_pre/catch_mid/catch_pos word lists.

## Examples

    iex> Ffaker.En.Company.catch_phrase
    "Secured homogeneous extranet"
"""
@spec catch_phrase() :: String.t
def catch_phrase do
  [~F(catch_pre), ~F(catch_mid), ~F(catch_pos)]
  |> Enum.map(&Enum.random/1)
  |> Enum.join(" ")
end
@doc"""
Returns a random company "bs" slogan, built from one word of each of
the bs_pre/bs_mid/bs_pos word lists.

## Examples

    iex> Ffaker.En.Company.bs
    "orchestrate transparent e-tailers"
"""
@spec bs() :: String.t
def bs do
  [~F(bs_pre), ~F(bs_mid), ~F(bs_pos)]
  |> Enum.map(&Enum.random/1)
  |> Enum.join(" ")
end
@doc"""
Returns a random company position, optionally prefixed with an area
and/or a seniority prefix.

## Examples

    iex> Ffaker.En.Company.position
    "Executive Manager"
"""
@spec position() :: String.t
def position do
  pos = Enum.random(@positions)
  prefix = Enum.random(@position_prefixes)

  ["#{position_area()} #{pos}",
   "#{prefix} #{pos}",
   "#{prefix} #{position_area()} #{pos}"]
  |> Enum.random
end
@doc"""
Returns a random company position area, drawn from the position_areas
data file.

## Examples

    iex> Ffaker.En.Company.position_area
    "IT"
"""
@spec position_area() :: String.t
def position_area do
  Enum.random(~F(position_areas))
end
end
| 19.133333 | 64 | 0.607765 |
03d9e35b56133ebfa77c8bab50109cda197196a0 | 1,799 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/theme_color_pair.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/theme_color_pair.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/theme_color_pair.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Slides.V1.Model.ThemeColorPair do
  @moduledoc """
  A pair mapping a theme color type to the concrete color it represents.

  ## Attributes

  - color (RgbColor): The concrete color corresponding to the theme color type above. Defaults to: `null`.
  - type (String): The type of the theme color. Defaults to: `null`.
    - Enum - one of [THEME_COLOR_TYPE_UNSPECIFIED, DARK1, LIGHT1, DARK2, LIGHT2, ACCENT1, ACCENT2, ACCENT3, ACCENT4, ACCENT5, ACCENT6, HYPERLINK, FOLLOWED_HYPERLINK, TEXT1, BACKGROUND1, TEXT2, BACKGROUND2]
  """

  # Quoted-atom keys mirror the JSON field names used by the Slides API.
  defstruct [
    :"color",
    :"type"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.ThemeColorPair do
  import GoogleApi.Slides.V1.Deserializer

  # Decodes the nested "color" field into an RgbColor struct.
  def decode(value, options) do
    value
    |> deserialize(:"color", :struct, GoogleApi.Slides.V1.Model.RgbColor, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.ThemeColorPair do
  # Serializes the struct, dropping nil fields from the JSON output.
  def encode(value, options) do
    GoogleApi.Slides.V1.Deserializer.serialize_non_nil(value, options)
  end
end
| 35.27451 | 205 | 0.748749 |
03d9e64fa02df1c280288c0ca7cf1d621ef3ad1c | 324 | exs | Elixir | priv/repo/migrations/20210514162941_create_addresses.exs | jwarwick/student_list | d35a2fcef2025d3de9b7915682965c48481c1d15 | [
"MIT"
] | 1 | 2021-06-27T20:02:11.000Z | 2021-06-27T20:02:11.000Z | priv/repo/migrations/20210514162941_create_addresses.exs | jwarwick/student_list | d35a2fcef2025d3de9b7915682965c48481c1d15 | [
"MIT"
] | null | null | null | priv/repo/migrations/20210514162941_create_addresses.exs | jwarwick/student_list | d35a2fcef2025d3de9b7915682965c48481c1d15 | [
"MIT"
] | null | null | null | defmodule StudentList.Repo.Migrations.CreateAddresses do
use Ecto.Migration
# Creates the addresses table: street/city/state/zip/phone fields plus
# Ecto's inserted_at/updated_at timestamps.
def change do
  create table(:addresses) do
    add :address1, :string
    add :address2, :string
    add :city, :string
    add :state, :string
    add :zip, :string
    add :phone, :string

    timestamps()
  end
end
end
| 19.058824 | 56 | 0.635802 |
03da032c4f945fed2920509e5ca92b97e8db0372 | 2,808 | ex | Elixir | lib/elixero/core_api/models/invoices/invoice.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 84 | 2016-11-09T01:15:17.000Z | 2022-01-06T02:55:35.000Z | lib/elixero/core_api/models/invoices/invoice.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 14 | 2017-03-10T04:16:07.000Z | 2021-11-10T16:39:19.000Z | lib/elixero/core_api/models/invoices/invoice.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 18 | 2017-03-11T21:12:15.000Z | 2022-02-22T20:07:10.000Z | defmodule EliXero.CoreApi.Models.Invoices.Invoice do
use Ecto.Schema
import Ecto.Changeset
@derive {Poison.Encoder, except: [:__meta__, :id]}
@fields [
:InvoiceID,
:InvoiceNumber,
:Type,
:Status,
:LineAmountTypes,
:Date,
:DueDate,
:ExpectedPaymentDate,
:PlannedPaymentDate,
:SubTotal,
:TotalTax,
:TotalDiscount,
:Total,
:CurrencyCode,
:CurrencyRate,
:FullyPaidOnDate,
:AmountDue,
:AmountPaid,
:AmountCredited,
:HasAttachments,
:BrandingThemeID,
:Url,
:Reference,
:SentToContact,
:UpdatedDateUTC,
:StatusAttributeString
]
schema "invoices" do
field :InvoiceID, Ecto.UUID
field :InvoiceNumber, :string
embeds_one :Contact, EliXero.CoreApi.Models.Contacts.Contact
field :Type, :string
field :Status, :string
field :LineAmountTypes, :string
field :Date, :string
field :DueDate, :string
field :ExpectedPaymentDate, :string
field :PlannedPaymentDate, :string
field :SubTotal, :decimal
field :TotalTax, :decimal
field :TotalDiscount, :decimal
field :Total, :decimal
field :CurrencyCode, :string
field :CurrencyRate, :decimal
field :FullyPaidOnDate, :string
field :AmountDue, :decimal
field :AmountPaid, :decimal
field :AmountCredited, :decimal
field :HasAttachments, :boolean
field :BrandingThemeID, Ecto.UUID
field :Url, :string
field :Reference, :string
embeds_many :LineItems, EliXero.CoreApi.Models.Common.LineItem
field :SentToContact, :boolean
embeds_many :CreditNotes, EliXero.CoreApi.Models.CreditNotes.CreditNote
embeds_many :Prepayments, EliXero.CoreApi.Models.Prepayments.Prepayment
embeds_many :Overpayments, EliXero.CoreApi.Models.Overpayments.Overpayment
embeds_many :Payments, EliXero.CoreApi.Models.Payments.Payment
field :UpdatedDateUTC, :string
embeds_many :ValidationErrors, EliXero.CoreApi.Models.Common.Error
embeds_many :Warnings, EliXero.CoreApi.Models.Common.Warning
field :StatusAttributeString, :string
end
# Builds a changeset for `struct` from `data`: casts every scalar field
# in @fields plus all embedded schemas (contact, line items, credit
# notes, pre/overpayments, payments, validation errors and warnings).
def changeset(struct, data) do
  struct
  |> cast(data, @fields)
  |> cast_embed(:Contact)
  |> cast_embed(:LineItems)
  |> cast_embed(:CreditNotes)
  |> cast_embed(:Prepayments)
  |> cast_embed(:Overpayments)
  |> cast_embed(:Payments)
  |> cast_embed(:ValidationErrors)
  |> cast_embed(:Warnings)
end
end | 33.035294 | 83 | 0.602564 |
03da2605b12c40fc73798a5718f4f528de36a5c5 | 70 | exs | Elixir | app/phxfeeds/test/test_helper.exs | chrispaynes/PhoenixFeeds | dd6451b96c8005eff94727da760a3716ecb18545 | [
"MIT"
] | null | null | null | app/phxfeeds/test/test_helper.exs | chrispaynes/PhoenixFeeds | dd6451b96c8005eff94727da760a3716ecb18545 | [
"MIT"
] | null | null | null | app/phxfeeds/test/test_helper.exs | chrispaynes/PhoenixFeeds | dd6451b96c8005eff94727da760a3716ecb18545 | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Phxfeeds.Repo, :manual)
| 23.333333 | 54 | 0.785714 |
03da428a562931c02a322328efd2ca7fa2a7dc10 | 2,143 | exs | Elixir | portfolio/config/dev.exs | JackMaarek/portfolio | 4423e67df870b14228edbc9e4ce3f3cdf1bccc2d | [
"MIT"
] | null | null | null | portfolio/config/dev.exs | JackMaarek/portfolio | 4423e67df870b14228edbc9e4ce3f3cdf1bccc2d | [
"MIT"
] | 11 | 2020-04-29T10:28:20.000Z | 2020-04-29T11:03:13.000Z | portfolio/config/dev.exs | JackMaarek/portfolio | 4423e67df870b14228edbc9e4ce3f3cdf1bccc2d | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :portfolio, Portfolio.Repo,
username: "postgres",
password: "postgres",
database: "portfolio",
hostname: "db",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :portfolio, PortfolioWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :portfolio, PortfolioWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/portfolio_web/(live|views)/.*(ex)$",
~r"lib/portfolio_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.831169 | 68 | 0.691554 |
03da4d52959d5d034bc26d273d1e3d04b785953d | 2,032 | ex | Elixir | lib/ueberauth/strategy/linkedin/oauth.ex | mayulu/ueberauth_linkedin | a758ad6daa2bd8469f51a54bb2ea0e4efbf46df1 | [
"MIT"
] | 9 | 2016-04-17T21:50:24.000Z | 2021-04-26T11:22:46.000Z | lib/ueberauth/strategy/linkedin/oauth.ex | mayulu/ueberauth_linkedin | a758ad6daa2bd8469f51a54bb2ea0e4efbf46df1 | [
"MIT"
] | 9 | 2016-06-02T01:25:52.000Z | 2021-10-01T02:30:08.000Z | lib/ueberauth/strategy/linkedin/oauth.ex | mayulu/ueberauth_linkedin | a758ad6daa2bd8469f51a54bb2ea0e4efbf46df1 | [
"MIT"
] | 35 | 2016-05-29T23:16:03.000Z | 2022-03-06T09:57:28.000Z | defmodule Ueberauth.Strategy.LinkedIn.OAuth do
@moduledoc """
OAuth2 for LinkedIn.
Add `client_id` and `client_secret` to your configuration:
config :ueberauth, Ueberauth.Strategy.LinkedIn.OAuth,
client_id: System.get_env("LINKEDIN_CLIENT_ID"),
client_secret: System.get_env("LINKEDIN_CLIENT_SECRET")
"""
use OAuth2.Strategy
@defaults [
strategy: __MODULE__,
site: "https://api.linkedin.com",
authorize_url: "https://www.linkedin.com/uas/oauth2/authorization",
token_url: "https://www.linkedin.com/uas/oauth2/accessToken"
]
@doc """
Construct a client for requests to LinkedIn.
This will be setup automatically for you in `Ueberauth.Strategy.LinkedIn`.
These options are only useful for usage outside the normal callback phase of
Ueberauth.
"""
def client(opts \\ []) do
config = Application.get_env(:ueberauth, Ueberauth.Strategy.LinkedIn.OAuth)
opts =
@defaults
|> Keyword.merge(config)
|> Keyword.merge(opts)
json_library = Ueberauth.json_library()
OAuth2.Client.new(opts)
|> OAuth2.Client.put_serializer("application/json", json_library)
end
@doc """
Provides the authorize url for the request phase of Ueberauth.
No need to call this usually.
"""
def authorize_url!(params \\ [], opts \\ []) do
opts
|> client
# |> put_param(:state, "idos")
|> OAuth2.Client.authorize_url!(params)
end
def get_token!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.get_token!(params)
end
def get(token, url, headers \\ [], opts \\ []) do
client([token: token])
|> OAuth2.Client.get(url, headers, opts)
end
# Strategy Callbacks
def authorize_url(client, params) do
OAuth2.Strategy.AuthCode.authorize_url(client, params)
end
def get_token(client, params, headers) do
client
|> put_param("client_secret", client.client_secret)
|> put_header("Accept", "application/json")
|> OAuth2.Strategy.AuthCode.get_token(params, headers)
end
end
| 26.38961 | 79 | 0.682087 |
03da4ea53e3bcefb8befac8427558e631e3d5381 | 161 | ex | Elixir | lib/pipelines/tokenizer_behavior.ex | vinniefranco/pipelines | 0b0313765002428a8f302e1fe8231cb00cfbc283 | [
"MIT"
] | 1 | 2020-11-26T00:06:16.000Z | 2020-11-26T00:06:16.000Z | lib/pipelines/tokenizer_behavior.ex | vinniefranco/pipelines | 0b0313765002428a8f302e1fe8231cb00cfbc283 | [
"MIT"
] | null | null | null | lib/pipelines/tokenizer_behavior.ex | vinniefranco/pipelines | 0b0313765002428a8f302e1fe8231cb00cfbc283 | [
"MIT"
] | null | null | null | defmodule Pipelines.Tokenizer do
@moduledoc false
@callback tokenize({:error, term}) :: {:error, term}
@callback tokenize({:ok, term}) :: {:ok, term}
end
| 23 | 54 | 0.670807 |
03da585c003a5c6c0598b29f767a4450efe37aa2 | 1,146 | ex | Elixir | lib/kamleague/leagues/players_games.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | null | null | null | lib/kamleague/leagues/players_games.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 2 | 2021-11-04T21:05:24.000Z | 2021-11-04T21:51:48.000Z | lib/kamleague/leagues/players_games.ex | mzavoloka/kamleague | ba29263ed54cac5c67b537c4b7d1dbc522215341 | [
"MIT"
] | 1 | 2021-11-04T18:40:26.000Z | 2021-11-04T18:40:26.000Z | defmodule Kamleague.Leagues.PlayersGames do
use Ecto.Schema
import Ecto.Changeset
schema "players_games" do
belongs_to :player_info, Kamleague.Leagues.Player, foreign_key: :player_id
belongs_to :game, Kamleague.Leagues.Game, foreign_key: :game_id
field :location, :integer
field :win, :boolean
field :old_elo, :integer
field :new_elo, :integer
field :old_wins, :integer
field :new_wins, :integer
field :old_losses, :integer
field :new_losses, :integer
field :approved, :boolean, default: false
timestamps()
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [
:player_id,
:game_id,
:location,
:win,
:old_elo,
:new_elo,
:old_wins,
:new_wins,
:old_losses,
:new_losses,
:approved
])
|> validate_required([
:player_id,
:game_id,
:location,
:win,
:old_elo,
:new_elo,
:old_wins,
:new_wins,
:old_losses,
:new_losses
])
end
def changeset_approve(changeset, params) do
changeset
|> cast(params, [:approved])
end
end
| 20.464286 | 78 | 0.613438 |
03da888b4a3a2fbfa33a94d720d1676d886ce218 | 848 | ex | Elixir | lib/game/emails.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/emails.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | lib/game/emails.ex | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | defmodule Game.Emails do
@moduledoc """
Module for game emails
"""
alias Game.Config
use Bamboo.Phoenix, view: Web.EmailView
@from_email Application.get_env(:ex_venture, :mailer)[:from]
def welcome(user) do
base_email()
|> to(user.email)
|> subject("Welcome to #{Config.game_name()}")
|> render("welcome.text", user: user)
end
def new_mail(mail) do
base_email()
|> to(mail.receiver.email)
|> subject("You have new mail in #{Config.game_name()}")
|> render("mail.html", mail: mail)
|> render("mail.text", mail: mail)
end
def password_reset(user) do
base_email()
|> to(user.email)
|> subject("Password reset for #{Config.game_name()}")
|> render("reset.html", user: user)
end
def base_email() do
new_email()
|> from(ExVenture.config(@from_email))
end
end
| 21.74359 | 62 | 0.634434 |
03da8a21dc6280c487007e139de590604052a06c | 1,715 | exs | Elixir | apps/waylon_web/config/dev.exs | thyagostall/waylon | d2298e8b66d7a0e2e4819f11607c0bc94a7e8543 | [
"MIT"
] | null | null | null | apps/waylon_web/config/dev.exs | thyagostall/waylon | d2298e8b66d7a0e2e4819f11607c0bc94a7e8543 | [
"MIT"
] | null | null | null | apps/waylon_web/config/dev.exs | thyagostall/waylon | d2298e8b66d7a0e2e4819f11607c0bc94a7e8543 | [
"MIT"
] | null | null | null | # Since configuration is shared in umbrella projects, this file
# should only configure the :waylon_web application itself
# and only for organization purposes. All other config goes to
# the umbrella root.
use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :waylon_web, WaylonWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :waylon_web, WaylonWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/waylon_web/views/.*(ex)$},
~r{lib/waylon_web/templates/.*(eex)$}
]
]
| 27.66129 | 63 | 0.673469 |
03daa12ff386b36a4e7e01b96d83ad4ab8d7037a | 207 | exs | Elixir | apps/bookmarker/priv/repo/migrations/20190811152312_add_is_favorite_to_folders.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/bookmarker/priv/repo/migrations/20190811152312_add_is_favorite_to_folders.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/bookmarker/priv/repo/migrations/20190811152312_add_is_favorite_to_folders.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Bookmarker.Repo.Migrations.AddIsFavoriteToFolders do
use Ecto.Migration
def change do
alter table(:folders) do
add :is_favorite, :boolean, null: false, default: false
end
end
end
| 20.7 | 62 | 0.743961 |
03daad0baefdaae4a70242acdb89d534297d4e29 | 529 | ex | Elixir | umbrella/apps/sunulator_web/lib/sunulator_web/router.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
] | 2 | 2019-06-05T23:28:44.000Z | 2019-06-05T23:41:20.000Z | umbrella/apps/sunulator_web/lib/sunulator_web/router.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
] | 3 | 2021-03-09T12:30:08.000Z | 2021-09-01T19:32:06.000Z | umbrella/apps/sunulator_web/lib/sunulator_web/router.ex | plasticine/sunulator | f202518bae70cad06ea9d38e183c1b9873a77b18 | [
"MIT"
] | null | null | null | defmodule SunulatorWeb.Router do
use SunulatorWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug Phoenix.LiveView.Flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", SunulatorWeb do
pipe_through :browser
get "/", PageController, :index
end
# Other scopes may use custom stacks.
# scope "/api", SunulatorWeb do
# pipe_through :api
# end
end
| 18.892857 | 39 | 0.68242 |
03daf038ce90ed258ec8b1bf7a0770f9a01f4a01 | 665 | exs | Elixir | test/gmail/base_test.exs | philwade/elixir-gmail | 4c44fbc63b194dc4f6f02caec7c6ee6164a3a9a3 | [
"MIT"
] | 52 | 2015-02-19T23:23:34.000Z | 2020-07-12T12:53:09.000Z | test/gmail/base_test.exs | philwade/elixir-gmail | 4c44fbc63b194dc4f6f02caec7c6ee6164a3a9a3 | [
"MIT"
] | 17 | 2016-06-01T18:49:09.000Z | 2020-11-11T21:26:21.000Z | test/gmail/base_test.exs | philwade/elixir-gmail | 4c44fbc63b194dc4f6f02caec7c6ee6164a3a9a3 | [
"MIT"
] | 26 | 2016-03-08T09:13:58.000Z | 2021-09-27T08:39:22.000Z | ExUnit.start
defmodule Gmail.BaseTest do
use ExUnit.Case
test "uses the url in the app config if there is one" do
url = "http://appconfig.example.com"
Application.put_env :gmail, :api, %{url: url}
assert Gmail.Base.base_url == url
end
test "uses the default base url if nothing is set in the app config" do
Application.delete_env :gmail, :api
assert Gmail.Base.base_url == "https://www.googleapis.com/gmail/v1/"
end
test "uses the default base url if app config is set but has no url" do
Application.put_env :gmail, :api, %{nothing: "here"}
assert Gmail.Base.base_url == "https://www.googleapis.com/gmail/v1/"
end
end
| 28.913043 | 73 | 0.696241 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.