hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff40f832b23489132d1e18d6615cd3196ba9d2a7 | 209 | exs | Elixir | test/wiring_editor_web/controllers/page_controller_test.exs | apre/wiring_editor | 54337f97c95594258038a34949ebca7b423cbf6e | [
"WTFPL",
"Unlicense"
] | null | null | null | test/wiring_editor_web/controllers/page_controller_test.exs | apre/wiring_editor | 54337f97c95594258038a34949ebca7b423cbf6e | [
"WTFPL",
"Unlicense"
] | null | null | null | test/wiring_editor_web/controllers/page_controller_test.exs | apre/wiring_editor | 54337f97c95594258038a34949ebca7b423cbf6e | [
"WTFPL",
"Unlicense"
] | null | null | null | defmodule WiringEditorWeb.PageControllerTest do
use WiringEditorWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 23.222222 | 60 | 0.698565 |
ff411090b259c45da0f344a03cfc4e43ce0e2ae2 | 1,402 | ex | Elixir | lib/google_api/you_tube/v3/model/activity_content_details_recommendation.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/activity_content_details_recommendation.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | lib/google_api/you_tube/v3/model/activity_content_details_recommendation.ex | jesteracer/ytb | 67e3cab899e4f69e586383f7be2c3855c6beea49 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.ActivityContentDetailsRecommendation do
@moduledoc """
Information that identifies the recommended resource.
"""
@derive [Poison.Encoder]
defstruct [
:"reason",
:"resourceId",
:"seedResourceId"
]
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.ActivityContentDetailsRecommendation do
import GoogleApi.YouTube.V3.Deserializer
def decode(value, options) do
value
|> deserialize(:"resourceId", :struct, GoogleApi.YouTube.V3.Model.ResourceId, options)
|> deserialize(:"seedResourceId", :struct, GoogleApi.YouTube.V3.Model.ResourceId, options)
end
end
| 33.380952 | 95 | 0.75535 |
ff411832e71e4cc8c676ecf8cd41a803fbf90a44 | 117 | exs | Elixir | config/config.exs | scottdavis/recaptcha | 1b46e1a346c100e327cbc028fc6b8e7e8942e974 | [
"MIT"
] | null | null | null | config/config.exs | scottdavis/recaptcha | 1b46e1a346c100e327cbc028fc6b8e7e8942e974 | [
"MIT"
] | null | null | null | config/config.exs | scottdavis/recaptcha | 1b46e1a346c100e327cbc028fc6b8e7e8942e974 | [
"MIT"
] | null | null | null | use Mix.Config
config :recaptcha,
verify_url: "https://www.google.com/recaptcha/api/siteverify",
timeout: 5000,
| 19.5 | 64 | 0.74359 |
ff413888e07b53eb14056e025e53a1b331b6d2a3 | 881 | exs | Elixir | test/orders/order_test.exs | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | test/orders/order_test.exs | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | test/orders/order_test.exs | Sup3r-Us3r/exlivery | 7d8c5c0011f53cdca1fc8345e1b267355995ac35 | [
"MIT"
] | null | null | null | defmodule ExliveryTest.Orders.OrderTest do
use ExUnit.Case
import ExliveryTest.Support.Factory
alias Exlivery.Orders.Order
describe "build/2" do
test "when all params are valid, returns an order" do
user = build(:user)
items = [
build(:item),
build(:item,
description: "Item description",
category: :hamburguer,
quantity: 2,
unity_price: Decimal.new("20.50")
)
]
response = Order.build(user, items)
expected_response = {:ok, build(:order)}
assert response == expected_response
end
test "when there is not items in the order, returns an error" do
user = build(:user)
items = []
response = Order.build(user, items)
expected_response = {:error, "Invalid parameters"}
assert response == expected_response
end
end
end
| 20.97619 | 68 | 0.614075 |
ff41595cf616c77d138eedeaa208ca8b697c8114 | 242 | ex | Elixir | lib/calctorio/recipe.ex | jdfrens/calctorio | 40418e3f7e562c0ade2a672434589727949e0439 | [
"MIT"
] | null | null | null | lib/calctorio/recipe.ex | jdfrens/calctorio | 40418e3f7e562c0ade2a672434589727949e0439 | [
"MIT"
] | null | null | null | lib/calctorio/recipe.ex | jdfrens/calctorio | 40418e3f7e562c0ade2a672434589727949e0439 | [
"MIT"
] | null | null | null | defmodule Calctorio.Recipe do
@moduledoc """
Represents a recipe.
"""
defstruct inputs: [], outputs: [], time: 0.0
@type t :: %__MODULE__{
inputs: keyword,
outputs: keyword,
time: float
}
end
| 17.285714 | 46 | 0.553719 |
ff418aa418a022f204d6f5829aaac912d3b0bdd7 | 2,659 | ex | Elixir | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision_condition.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision_condition.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/cloud_run/lib/google_api/cloud_run/v1alpha1/model/revision_condition.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudRun.V1alpha1.Model.RevisionCondition do
@moduledoc """
RevisionCondition defines a readiness condition for a Revision.
## Attributes
- lastTransitionTime (String.t): Last time the condition transitioned from one status to another. +optional Defaults to: `null`.
- message (String.t): Human readable message indicating details about the current status. +optional Defaults to: `null`.
- reason (String.t): One-word CamelCase reason for the condition's last transition. +optional Defaults to: `null`.
- status (String.t): Status of the condition, one of True, False, Unknown. Defaults to: `null`.
- type (String.t): RevisionConditionType is used to communicate the status of the reconciliation process. See also: https://github.com/knative/serving/blob/master/docs/spec/errors.md#error-conditions-and-reporting Types include: * \"Ready\": True when the Revision is ready. * \"ResourcesAvailable\": True when underlying resources have been provisioned. * \"ContainerHealthy\": True when the Revision readiness check completes. * \"Active\": True when the Revision may receive traffic. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:lastTransitionTime => any(),
:message => any(),
:reason => any(),
:status => any(),
:type => any()
}
field(:lastTransitionTime)
field(:message)
field(:reason)
field(:status)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.CloudRun.V1alpha1.Model.RevisionCondition do
def decode(value, options) do
GoogleApi.CloudRun.V1alpha1.Model.RevisionCondition.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudRun.V1alpha1.Model.RevisionCondition do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 44.316667 | 550 | 0.737495 |
ff418ca449eac99f9cd3648cf38c729c8db3d244 | 1,319 | exs | Elixir | mix.exs | elixir-cqrs/cqrs_tools_absinthe | c80285f252eba8273384fdd1c718cf50b2d36671 | [
"MIT"
] | 3 | 2022-03-06T03:07:49.000Z | 2022-03-06T03:16:08.000Z | mix.exs | blunt-elixir/blunt_absinthe | c80285f252eba8273384fdd1c718cf50b2d36671 | [
"MIT"
] | null | null | null | mix.exs | blunt-elixir/blunt_absinthe | c80285f252eba8273384fdd1c718cf50b2d36671 | [
"MIT"
] | null | null | null | defmodule BluntAbsinthe.MixProject do
use Mix.Project
@version "0.1.0"
def project do
[
version: @version,
app: :blunt_absinthe,
version: "0.1.0",
elixir: "~> 1.12",
start_permanent: Mix.env() == :prod,
deps: deps(),
source_url: "https://github.com/elixir-blunt/blunt_absinthe",
package: [
description: "Absinthe macros for `blunt` commands and queries",
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/elixir-blunt/blunt_absinthe"}
],
elixirc_paths: elixirc_paths(Mix.env())
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:blunt, path: "../blunt", override: true},
{:blunt, "~> 0.1"},
{:absinthe, "~> 1.7"},
# For testing
{:etso, "~> 0.1.6", only: [:test]},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
{:elixir_uuid, "~> 1.6", only: [:dev, :test], override: true, hex: :uuid_utils},
# generate docs
{:ex_doc, "~> 0.28", only: :dev, runtime: false}
]
end
end
| 25.862745 | 86 | 0.56558 |
ff41b1a0cd0f15b9121bd5cf6c215b5e5d26f5b8 | 219 | ex | Elixir | lib/cbstats_importer/util.ex | micahyoung/cbstats-importer | a7176b3a25d8a4e919ddda32475c9ecf65e9892a | [
"Unlicense",
"MIT"
] | 1 | 2016-01-06T18:19:35.000Z | 2016-01-06T18:19:35.000Z | lib/cbstats_importer/util.ex | micahyoung/cbstats-importer | a7176b3a25d8a4e919ddda32475c9ecf65e9892a | [
"Unlicense",
"MIT"
] | null | null | null | lib/cbstats_importer/util.ex | micahyoung/cbstats-importer | a7176b3a25d8a4e919ddda32475c9ecf65e9892a | [
"Unlicense",
"MIT"
] | null | null | null | defmodule CbstatsImporter.Util do
def now_microseconds do
{megas, secs, millis} = :os.timestamp()
((megas * 1_000_000) + secs) * 1_000_000 + millis
end
def now_datetime do
Ecto.DateTime.utc
end
end
| 19.909091 | 53 | 0.689498 |
ff41b82d58f3729feb7aec798d633c6ee69a71c2 | 10,389 | ex | Elixir | lib/projections/ecto.ex | MrYawe/commanded-ecto-projections | 55f5103405e415578fe6b59cc72632f0e50ba51b | [
"MIT"
] | null | null | null | lib/projections/ecto.ex | MrYawe/commanded-ecto-projections | 55f5103405e415578fe6b59cc72632f0e50ba51b | [
"MIT"
] | null | null | null | lib/projections/ecto.ex | MrYawe/commanded-ecto-projections | 55f5103405e415578fe6b59cc72632f0e50ba51b | [
"MIT"
] | 1 | 2022-02-19T23:24:39.000Z | 2022-02-19T23:24:39.000Z | defmodule Commanded.Projections.Ecto do
@moduledoc """
Read model projections for Commanded using Ecto.
## Example usage
defmodule Projector do
use Commanded.Projections.Ecto,
application: MyApp.Application,
name: "my-projection",
repo: MyApp.Repo,
schema_prefix: "my-prefix",
timeout: :infinity
project %Event{}, _metadata, fn multi ->
Ecto.Multi.insert(multi, :my_projection, %MyProjection{...})
end
project %AnotherEvent{}, fn multi ->
Ecto.Multi.insert(multi, :my_projection, %MyProjection{...})
end
end
## Guides
- [Getting started](getting-started.html)
- [Usage](usage.html)
"""
defmacro __using__(opts) do
opts = opts || []
schema_prefix =
opts[:schema_prefix] || Application.get_env(:commanded_ecto_projections, :schema_prefix)
quote location: :keep do
@behaviour Commanded.Projections.Ecto
@opts unquote(opts)
@repo @opts[:repo] || Application.get_env(:commanded_ecto_projections, :repo) ||
raise("Commanded Ecto projections expects :repo to be configured in environment")
@timeout @opts[:timeout] || :infinity
# Pass through any other configuration to the event handler
@handler_opts Keyword.drop(@opts, [:repo, :schema_prefix, :timeout])
unquote(__include_schema_prefix__(schema_prefix))
unquote(__include_projection_version_schema__())
use Ecto.Schema
use Commanded.Event.Handler, @handler_opts
import Ecto.Changeset
import Ecto.Query
import unquote(__MODULE__)
def update_projection(event, metadata, multi_fn) do
projection_name = Map.fetch!(metadata, :handler_name)
event_number = Map.fetch!(metadata, :event_number)
changeset =
%ProjectionVersion{projection_name: projection_name}
|> ProjectionVersion.changeset(%{last_seen_event_number: event_number})
prefix = schema_prefix(event, metadata)
multi =
Ecto.Multi.new()
|> Ecto.Multi.run(:verify_projection_version, fn repo, _changes ->
version =
case repo.get(ProjectionVersion, projection_name, prefix: prefix) do
nil ->
repo.insert!(
%ProjectionVersion{
projection_name: projection_name,
last_seen_event_number: 0
},
prefix: prefix
)
version ->
version
end
if version.last_seen_event_number < event_number do
{:ok, %{version: version}}
else
{:error, :already_seen_event}
end
end)
|> Ecto.Multi.update(:projection_version, changeset, prefix: prefix)
with %Ecto.Multi{} = multi <- apply(multi_fn, [multi]),
{:ok, changes} <- transaction(multi) do
if function_exported?(__MODULE__, :after_update, 3) do
apply(__MODULE__, :after_update, [event, metadata, changes])
else
:ok
end
else
{:error, :verify_projection_version, :already_seen_event, _changes} -> :ok
{:error, _stage, error, _changes} -> {:error, error}
{:error, _error} = reply -> reply
end
end
def unsafe_update_projection(event, metadata, multi_fn) do
projection_name = Map.fetch!(metadata, :handler_name)
event_number = Map.fetch!(metadata, :event_number)
prefix = schema_prefix(event, metadata)
multi = Ecto.Multi.new()
with %Ecto.Multi{} = multi <- apply(multi_fn, [multi]),
{:ok, changes} <- transaction(multi) do
if function_exported?(__MODULE__, :after_update, 3) do
apply(__MODULE__, :after_update, [event, metadata, changes])
else
:ok
end
else
{:error, _stage, error, _changes} -> {:error, error}
{:error, _error} = reply -> reply
end
end
defp transaction(%Ecto.Multi{} = multi) do
@repo.transaction(multi, timeout: @timeout, pool_timeout: @timeout)
end
defoverridable schema_prefix: 1, schema_prefix: 2
end
end
## User callbacks
@optional_callbacks [after_update: 3, schema_prefix: 1, schema_prefix: 2]
@doc """
The optional `after_update/3` callback function defined in a projector is
called after each projected event.
The function receives the event, its metadata, and all changes from the
`Ecto.Multi` struct that were executed within the database transaction.
You could use this function to notify subscribers that the read model has been
updated, such as by publishing changes via Phoenix PubSub channels.
## Example
defmodule MyApp.ExampleProjector do
use Commanded.Projections.Ecto,
application: MyApp.Application,
repo: MyApp.Projections.Repo,
name: "MyApp.ExampleProjector"
project %AnEvent{name: name}, fn multi ->
Ecto.Multi.insert(multi, :example_projection, %ExampleProjection{name: name})
end
@impl Commanded.Projections.Ecto
def after_update(event, metadata, changes) do
# Use the event, metadata, or `Ecto.Multi` changes and return `:ok`
:ok
end
end
"""
@callback after_update(event :: struct, metadata :: map, changes :: Ecto.Multi.changes()) ::
:ok | {:error, any}
@doc """
The optional `schema_prefix/1` callback function defined in a projector is
used to set the schema of the `projection_versions` table used by the
projector for idempotency checks.
It is passed the event and its metadata and must return the schema name, as a
string, or `nil`.
"""
@callback schema_prefix(event :: struct) :: String.t() | nil
@doc """
The optional `schema_prefix/2` callback function defined in a projector is
used to set the schema of the `projection_versions` table used by the
projector for idempotency checks.
It is passed the event and its metadata, and must return the schema name, as a
string, or `nil`
"""
@callback schema_prefix(event :: struct(), metadata :: map()) :: String.t() | nil
defp __include_schema_prefix__(schema_prefix) do
quote do
cond do
is_nil(unquote(schema_prefix)) ->
def schema_prefix(_event), do: nil
def schema_prefix(event, _metadata), do: schema_prefix(event)
is_binary(unquote(schema_prefix)) ->
def schema_prefix(_event), do: nil
def schema_prefix(_event, _metadata), do: unquote(schema_prefix)
is_function(unquote(schema_prefix), 1) ->
def schema_prefix(event), do: nil
def schema_prefix(event, _metadata), do: apply(unquote(schema_prefix), [event])
is_function(unquote(schema_prefix), 2) ->
def schema_prefix(event), do: nil
def schema_prefix(event, metadata), do: apply(unquote(schema_prefix), [event, metadata])
true ->
raise ArgumentError,
message:
"expected :schema_prefix option to be a string or a one-arity or two-arity function, but got: " <>
inspect(unquote(schema_prefix))
end
end
end
defp __include_projection_version_schema__ do
quote do
defmodule ProjectionVersion do
@moduledoc false
use Ecto.Schema
import Ecto.Changeset
@primary_key {:projection_name, :string, []}
schema "projection_versions" do
field(:last_seen_event_number, :integer)
timestamps(type: :naive_datetime_usec)
end
@required_fields ~w(last_seen_event_number)a
def changeset(model, params \\ :invalid) do
cast(model, params, @required_fields)
end
end
end
end
defmacro project(event, do: block) do
IO.warn(
"project macro with \"do end\" block is deprecated; use project/2 with function instead",
Macro.Env.stacktrace(__ENV__)
)
quote do
def handle(unquote(event) = event, metadata) do
update_projection(event, metadata, fn var!(multi) ->
unquote(block)
end)
end
end
end
@doc """
Project a domain event into a read model by appending one or more operations
to the `Ecto.Multi` struct passed to the projection function you define
The operations will be executed in a database transaction including an
idempotency check to guarantee an event cannot be projected more than once.
## Example
project %AnEvent{}, fn multi ->
Ecto.Multi.insert(multi, :my_projection, %MyProjection{...})
end
"""
defmacro project(event, lambda) do
quote do
def handle(unquote(event) = event, metadata) do
update_projection(event, metadata, unquote(lambda))
end
def unsafe_handle(unquote(event) = event, metadata) do
unsafe_update_projection(event, metadata, unquote(lambda))
end
end
end
defmacro project(event, metadata, do: block) do
IO.warn(
"project macro with \"do end\" block is deprecated; use project/3 with function instead",
Macro.Env.stacktrace(__ENV__)
)
quote do
def handle(unquote(event) = event, unquote(metadata) = metadata) do
update_projection(event, metadata, fn var!(multi) ->
unquote(block)
end)
end
end
end
@doc """
Project a domain event and its metadata map into a read model by appending one
or more operations to the `Ecto.Multi` struct passed to the projection
function you define.
The operations will be executed in a database transaction including an
idempotency check to guarantee an event cannot be projected more than once.
## Example
project %AnEvent{}, metadata, fn multi ->
Ecto.Multi.insert(multi, :my_projection, %MyProjection{...})
end
"""
defmacro project(event, metadata, lambda) do
quote do
def handle(unquote(event) = event, unquote(metadata) = metadata) do
update_projection(event, metadata, unquote(lambda))
end
end
end
defmacro __before_compile__(_env) do
quote generated: true do
@doc false
def unsafe_handle(_event, _metadata), do: :ok
end
end
end
| 31.01194 | 112 | 0.635191 |
ff41b96fb00c8a819a3fc7de42937db658d4fad1 | 1,060 | ex | Elixir | lib/callbackex/callback.ex | secretworry/callbackex | 7cd63d45068e0612bd3b76e759332392dcf54676 | [
"Apache-2.0"
] | 4 | 2016-12-03T07:14:52.000Z | 2019-08-17T23:09:28.000Z | lib/callbackex/callback.ex | secretworry/callbackex | 7cd63d45068e0612bd3b76e759332392dcf54676 | [
"Apache-2.0"
] | null | null | null | lib/callbackex/callback.ex | secretworry/callbackex | 7cd63d45068e0612bd3b76e759332392dcf54676 | [
"Apache-2.0"
] | null | null | null | defmodule Callbackex.Callback do
@moduledoc """
The Callback specification
There are two kind of callbacks: function callbacks and module callbacks
## Function callbacks
A function callback is any function that receives a value and a set of options and return a new value.
Its type signature must be
`(any, Keyword.t) -> any`
## Module callbacks
A module callback is a module that must export:
* a `call/2` function to process the value passed in
* a `init/1` function whick takes a set of options and initialize the callback
The result of `init/1` is passed to `call/2` as the second argument.
# Pipeline
The `Callbackex.Callbacks` provides methods to execute a callback pipeline
"""
@type t :: module
@type result_t ::
{:ok, any}
| {:error, any}
@type opts :: any
@callback init(Keyword.t) :: opts
@callback call(any, opts) :: result_t
defmacro __using__(_opts) do
quote do
@behaviour unquote(__MODULE__)
def init(opts), do: opts
defoverridable [init: 1]
end
end
end | 24.651163 | 104 | 0.690566 |
ff41fd5be58ed1ee74644e1d4431b2f3b711336a | 1,320 | ex | Elixir | lib/surface_bootstrap/form/url_input.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 17 | 2021-02-18T20:10:52.000Z | 2021-12-30T14:41:00.000Z | lib/surface_bootstrap/form/url_input.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 9 | 2021-03-21T20:15:48.000Z | 2021-06-23T07:45:20.000Z | lib/surface_bootstrap/form/url_input.ex | dclausen/surface_bootstrap | f282b7c653160fb4b4ca687cf8fb13e68937884f | [
"MIT"
] | 10 | 2021-03-21T20:10:37.000Z | 2021-12-28T00:06:06.000Z | defmodule SurfaceBootstrap.Form.UrlInput do
@moduledoc """
The url input element as defined here:
- https://hexdocs.pm/phoenix_html/Phoenix.HTML.Form.html#url_input/3
- https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/url
"""
use Surface.Component
use SurfaceBootstrap.Form.TextInputBase
alias Surface.Components.Form.UrlInput
@doc "Max length of field, as enforced by client browser. Not validated by Elixir."
prop maxlength, :integer
@doc "Minimum length of field, as enforced by client browser. Not validated by Elixir."
prop minlength, :integer
def render(assigns) do
~F"""
<FieldContext name={@field}>
{raw(optional_div(assigns))}
<Label :if={@label && !@in_group && !@floating_label} class="form-label">{@label}</Label>
<UrlInput
class={input_classes(assigns) ++ @class}
field={@field}
value={@value}
:props={default_surface_input_props(assigns)}
opts={default_core_input_opts(assigns) ++ @opts}
/>
<Label :if={@label && !@in_group && @floating_label} class="form-label">{@label}</Label>
<BootstrapErrorTag has_error={has_error?(assigns)} has_change={has_change?(assigns)} />
{help_text(assigns)}
<#Raw :if={!@in_group}></div></#Raw>
</FieldContext>
"""
end
end
| 33.846154 | 95 | 0.669697 |
ff421c0e23fb3d3ddaaed7c6c41e5c812abd1b77 | 2,149 | ex | Elixir | web/controllers/page_controller.ex | iboard/px.iboard.cc | f7df724931b977e47e60ab8cab822f69df7e77f8 | [
"MIT"
] | 2 | 2016-07-28T11:42:09.000Z | 2016-09-12T13:56:53.000Z | web/controllers/page_controller.ex | iboard/px.iboard.cc | f7df724931b977e47e60ab8cab822f69df7e77f8 | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | iboard/px.iboard.cc | f7df724931b977e47e60ab8cab822f69df7e77f8 | [
"MIT"
] | null | null | null | defmodule Iboard.PageController do
use Iboard.Web, :controller
alias Iboard.Page
plug :scrub_params, "page" when action in [:create, :update]
def index(conn, params) do
pages = case params["order"] do
"asc" -> Repo.all(from p in Page, order_by: [asc: p.updated_at])
_ -> Repo.all(from p in Page, order_by: [desc: p.updated_at])
end
render(conn, "index.html", pages: pages)
end
def new(conn, _params) do
changeset = Page.changeset(%Page{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"page" => page_params}) do
changeset = Page.changeset(%Page{}, page_params)
case Repo.insert(changeset) do
{:ok, _page} ->
conn
|> put_flash(:info, "Page created successfully.")
|> redirect(to: page_path(conn, :index))
{:error, changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
page = Repo.get!(Page, id)
render(conn, "show.html", page: page)
end
def edit(conn, %{"id" => id}) do
page = Repo.get!(Page, id)
changeset = Page.changeset(page)
render(conn, "edit.html", page: page, changeset: changeset)
end
def update(conn, %{"id" => id, "page" => page_params}) do
page = Repo.get!(Page, id)
changeset = Page.changeset(page, page_params)
case Repo.update(changeset) do
{:ok, page} ->
conn
|> put_flash(:info, "Page updated successfully.")
|> redirect(to: page_path(conn, :show, page))
{:error, changeset} ->
render(conn, "edit.html", page: page, changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
page = Repo.get!(Page, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
Repo.delete!(page)
conn
|> put_flash(:info, "Page deleted successfully.")
|> redirect(to: page_path(conn, :index))
end
def md(conn, %{"filename" => filename}) do
file = String.upcase(filename)
{:ok, text } = File.read("#{file}.md")
render conn, "markdown.html", text: text
end
end
| 27.551282 | 70 | 0.609121 |
ff4230aa02f22480c1bb77f97266d5c959985ae0 | 9,195 | ex | Elixir | lib/mix/tasks/appsignal.diagnose.ex | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | lib/mix/tasks/appsignal.diagnose.ex | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | lib/mix/tasks/appsignal.diagnose.ex | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Appsignal.Diagnose do
use Mix.Task
alias Appsignal.Config
alias Appsignal.Diagnose
@system Application.get_env(:appsignal, :appsignal_system, Appsignal.System)
@report Application.get_env(:appsignal, :appsignal_diagnose_report, Appsignal.Diagnose.Report)
@shortdoc "Starts and tests AppSignal while validating the configuration."
def run(args) do
send_report =
cond do
is_nil(args) -> nil
Enum.member?(args, "--send-report") -> :send_report
Enum.member?(args, "--no-send-report") -> :no_send_report
true -> nil
end
Application.load(:appsignal)
report = %{process: %{uid: @system.uid}}
configure_appsignal()
config_report = Diagnose.Config.config()
config = config_report[:options]
report = Map.put(report, :config, config_report)
header()
empty_line()
library_report = Diagnose.Library.info()
report = Map.put(report, :library, library_report)
print_library_info(library_report)
empty_line()
host_report = Diagnose.Host.info()
report = Map.put(report, :host, host_report)
print_host_information(host_report)
empty_line()
report =
case Diagnose.Agent.report() do
{:ok, agent_report} ->
print_agent_diagnostics(agent_report)
Map.put(report, :agent, agent_report)
{:error, :nif_not_loaded} ->
IO.puts("Agent diagnostics")
IO.puts(" Error: Nif not loaded, aborting.\n")
report
{:error, raw_report} ->
IO.puts("Agent diagnostics")
IO.puts(" Error: Could not parse the agent report:")
IO.puts(" Output: #{raw_report}\n")
Map.put(report, :agent, %{output: raw_report})
end
print_configuration(config_report)
empty_line()
validation_report = Diagnose.Validation.validate(config)
report = Map.put(report, :validation, validation_report)
print_validation(validation_report)
empty_line()
path_report = Diagnose.Paths.info()
report = Map.put(report, :paths, path_report)
print_paths(path_report)
empty_line()
send_report_to_appsignal_if_agreed_upon(config, report, send_report)
end
defp header do
IO.puts("AppSignal diagnose")
IO.puts(String.duplicate("=", 80))
IO.puts("Use this information to debug your configuration.")
IO.puts("More information is available on the documentation site.")
IO.puts("http://docs.appsignal.com/")
IO.puts("Send this output to support@appsignal.com if you need help.")
IO.puts(String.duplicate("=", 80))
end
defp print_library_info(library_report) do
IO.puts("AppSignal agent")
IO.puts(" Language: Elixir")
IO.puts(" Package version: #{library_report[:package_version]}")
IO.puts(" Agent version: #{library_report[:agent_version]}")
IO.puts(" Agent architecture: #{library_report[:agent_architecture]}")
IO.puts(" Nif loaded: #{yes_or_no(library_report[:extension_loaded])}")
end
defp print_host_information(host_report) do
IO.puts("Host information")
IO.puts(" Architecture: #{host_report[:architecture]}")
IO.puts(" Elixir version: #{host_report[:language_version]}")
IO.puts(" OTP version: #{host_report[:otp_version]}")
IO.puts(" Operating System: #{host_report[:os]}")
root_user = if host_report[:root], do: "yes (not recommended)", else: "no"
IO.puts(" root user: #{root_user}")
if host_report[:heroku] do
IO.puts(" Heroku: yes")
end
IO.puts(" Container: #{yes_or_no(host_report[:running_in_container])}")
end
defp configure_appsignal do
Config.initialize()
Config.write_to_environment()
end
defp print_agent_diagnostics(report) do
Diagnose.Agent.print(report)
end
defp print_configuration(config) do
IO.puts("Configuration")
Enum.each(config[:options], fn {key, _} = option ->
config_label = configuration_option_label(option)
option_sources = config[:sources]
sources = sources_for_option(key, option_sources)
sources_label = configuration_option_source_label(key, sources, option_sources)
IO.puts("#{config_label}#{sources_label}")
end)
IO.puts(
"\nRead more about how the diagnose config output is rendered\n" <>
"https://docs.appsignal.com/elixir/command-line/diagnose.html"
)
end
defp configuration_option_label({key, value}) do
" #{key}: #{format_value(value)}"
end
defp configuration_option_source_label(_, [], _), do: ""
defp configuration_option_source_label(_, [:default], _), do: ""
defp configuration_option_source_label(_, sources, _) when length(sources) == 1 do
" (Loaded from #{Enum.join(sources, ", ")})"
end
defp configuration_option_source_label(key, sources, option_sources) do
max_source_label_length =
sources
|> Enum.map(fn source ->
source
|> to_string
|> String.length()
end)
|> Enum.max()
# + 1 to account for the : symbol
max_source_label_length = max_source_label_length + 1
sources_label =
sources
|> Enum.map(fn source ->
label = String.pad_trailing("#{source}:", max_source_label_length)
" #{label} #{format_value(option_sources[source][key])}"
end)
|> Enum.join("\n")
"\n Sources:\n#{sources_label}"
end
defp sources_for_option(key, sources) do
[:default, :system, :file, :env]
|> Enum.map(fn source ->
if Map.has_key?(sources[source], key) do
source
end
end)
|> Enum.reject(fn value -> value == nil end)
end
defp format_value(value) do
inspect(value)
end
defp print_validation(validation_report) do
IO.puts("Validation")
IO.puts(" Push API key: #{validation_report[:push_api_key]}")
end
defp print_paths(path_report) do
IO.puts("Paths")
labels = Diagnose.Paths.labels()
Enum.each(path_report, fn {name, path} ->
print_path(path, Map.fetch!(labels, name))
end)
end
defp print_path(path, label) do
IO.puts(" #{label}")
IO.puts(" Path: #{inspect(path[:path])}")
if path[:exists] do
IO.puts(" Writable?: #{yes_or_no(path[:writable])}")
file_uid = path[:ownership][:uid]
process_uid = @system.uid
IO.write(" Ownership?: #{yes_or_no(file_uid == process_uid)}")
IO.puts(" (file: #{file_uid}, process: #{process_uid})")
else
IO.puts(" Exists?: no")
end
if path[:content] do
IO.puts(" Contents (last 10 lines):")
Enum.each(Enum.take(path[:content], -10), &IO.puts/1)
end
if path[:error], do: IO.puts(" Error: #{path[:error]}")
end
defp send_report_to_appsignal_if_agreed_upon(config, report, send_report) do
IO.puts("\nDiagnostics report")
IO.puts(" Do you want to send this diagnostics report to AppSignal?")
IO.puts(
" If you share this report you will be given a link to \n" <>
" AppSignal.com to validate the report.\n" <>
" You can also contact us at support@appsignal.com\n with your support token.\n\n"
)
answer =
case send_report do
:send_report ->
IO.puts(" Confirmed sending report using --send-report option.")
true
:no_send_report ->
IO.puts(" Not sending report. (Specified with the --no-send-report option.)")
false
_ ->
yes_or_no?(" Send diagnostics report to AppSignal? (Y/n): ")
end
case answer do
true ->
IO.puts("\n Transmitting diagnostics report")
send_report_to_appsignal(config, report)
false ->
IO.puts(" Not sending diagnostics report to AppSignal.")
end
end
# Submits the diagnostics report via the @report adapter (a module
# attribute defined elsewhere in this module — presumably swappable for
# tests; confirm) and prints the outcome.
#
# The {:error, %{status_code: 200, ...}} clause covers the case where the
# HTTP call succeeded but the response body could not be decoded.
def send_report_to_appsignal(config, report) do
  case @report.send(config, report) do
    {:ok, support_token} ->
      IO.puts(" Your support token: #{support_token}")
      IO.puts(" View this report: https://appsignal.com/diagnose/#{support_token}")

    {:error, %{status_code: 200, body: body}} ->
      IO.puts(" Error: Couldn't decode server response.")
      IO.puts(" Response body: #{body}")

    {:error, %{status_code: status_code, body: body}} ->
      IO.puts(" Error: Something went wrong while submitting the report to AppSignal.")
      IO.puts(" Response code: #{status_code}")
      IO.puts(" Response body: #{body}")

    {:error, %{reason: reason}} ->
      IO.puts(" Error: Something went wrong while submitting the report to AppSignal.")
      IO.puts(reason)
  end
end
# Prints a blank line to visually separate report sections.
defp empty_line, do: IO.puts("")
# Converts a boolean into the "yes"/"no" wording used throughout the
# report. Non-boolean input raises FunctionClauseError, matching the
# original two-clause definition.
defp yes_or_no(answer) when is_boolean(answer) do
  case answer do
    true -> "yes"
    false -> "no"
  end
end
# Ask for a yes or no input from the user.
#
# Reads a line from stdin; y/yes/<enter> (case-insensitive) -> true,
# n/no -> false. Any other input re-prompts recursively, as does a read
# error (after printing it).
# NOTE(review): :eof also re-prompts, which loops forever if stdin is
# closed (e.g. piped input exhausted) — confirm whether :eof should
# instead pick a default or abort.
defp yes_or_no?(prompt) do
  case IO.gets(prompt) do
    input when is_binary(input) ->
      # Inner `input` bindings intentionally shadow the raw line.
      case String.downcase(String.trim(input)) do
        input when input in ["y", "yes", ""] -> true
        input when input in ["n", "no"] -> false
        _ -> yes_or_no?(prompt)
      end

    :eof ->
      yes_or_no?(prompt)

    {:error, reason} ->
      IO.puts(" Error while reading input: #{reason}")
      yes_or_no?(prompt)
  end
end
end
| 30.65 | 96 | 0.638282 |
ff425cb25f786b2d66ae40decad558930c523d48 | 10,216 | exs | Elixir | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | SkyllaTech/elixir | 2642226f35e23fd68e4b246e0483057716dac4ca | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | SkyllaTech/elixir | 2642226f35e23fd68e4b246e0483057716dac4ca | [
"Apache-2.0"
] | 1 | 2021-05-22T13:12:51.000Z | 2021-05-22T13:12:51.000Z | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | SkyllaTech/elixir | 2642226f35e23fd68e4b246e0483057716dac4ca | [
"Apache-2.0"
] | 1 | 2021-11-08T10:21:27.000Z | 2021-11-08T10:21:27.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule Code.Normalizer.FormatterASTTest do
use ExUnit.Case, async: true
# Asserts that `Code.format_string!/2` and the normalizer round-trip
# (string_to_string/2) agree on `good`. `opts` are forwarded to both sides.
#
# A macro so the assertion reports at the caller's line (location: :keep)
# and so `good`/`opts` are evaluated exactly once (bind_quoted).
defmacro assert_same(good, opts \\ []) do
  quote bind_quoted: [good: good, opts: opts], location: :keep do
    assert IO.iodata_to_binary(Code.format_string!(good, opts)) ==
             string_to_string(good, opts)
  end
end
# Round-trips `good` through Code.string_to_quoted_with_comments!/2 and
# Code.quoted_to_algebra/2, rendering the algebra document back into a
# binary at the requested :line_length (default 98). Mirrors what the
# formatter produces, but via the AST normalizer path.
def string_to_string(good, opts) do
  width = Keyword.get(opts, :line_length, 98)

  parse_opts =
    Keyword.merge(
      [literal_encoder: &{:ok, {:__block__, &2, [&1]}}, token_metadata: true, unescape: false],
      opts
    )

  {quoted, comments} =
    good
    |> String.trim()
    |> Code.string_to_quoted_with_comments!(parse_opts)

  quoted
  |> Code.quoted_to_algebra([comments: comments, escape: false] ++ opts)
  |> Inspect.Algebra.format(width)
  |> IO.iodata_to_binary()
end
describe "integers" do
test "in decimal base" do
assert_same "0"
assert_same "100"
assert_same "007"
assert_same "10000"
assert_same "100_00"
end
test "in binary base" do
assert_same "0b0"
assert_same "0b1"
assert_same "0b101"
assert_same "0b01"
assert_same "0b111_111"
end
test "in octal base" do
assert_same "0o77"
assert_same "0o0"
assert_same "0o01"
assert_same "0o777_777"
end
test "in hex base" do
assert_same "0x1"
assert_same "0x01"
end
test "as chars" do
assert_same "?a"
assert_same "?1"
assert_same "?è"
assert_same "??"
assert_same "?\\\\"
assert_same "?\\s"
assert_same "?🎾"
end
end
describe "floats" do
test "with normal notation" do
assert_same "0.0"
assert_same "1.0"
assert_same "123.456"
assert_same "0.0000001"
assert_same "001.100"
assert_same "0_10000_0.000_000"
end
test "with scientific notation" do
assert_same "1.0e1"
assert_same "1.0e-1"
assert_same "1.0e01"
assert_same "1.0e-01"
assert_same "001.100e-010"
assert_same "0_100_0000.100e-010"
end
end
describe "atoms" do
test "true, false, nil" do
assert_same "nil"
assert_same "true"
assert_same "false"
end
test "without escapes" do
assert_same ~S[:foo]
end
test "with escapes" do
assert_same ~S[:"f\a\b\ro"]
end
test "with unicode" do
assert_same ~S[:ólá]
end
test "does not reformat aliases" do
assert_same ~S[:"Elixir.String"]
end
test "quoted operators" do
assert_same ~S[:"::"]
assert_same ~S[:"..//"]
assert_same ~S{["..//": 1]}
end
test "with interpolation" do
assert_same ~S[:"one #{2} three"]
end
test "with escapes and interpolation" do
assert_same ~S[:"one\n\"#{2}\"\nthree"]
end
end
describe "strings" do
test "without escapes" do
assert_same ~S["foo"]
end
test "with escapes" do
assert_same ~S["\x0A"]
assert_same ~S["f\a\b\ro"]
assert_same ~S["double \" quote"]
end
test "keeps literal new lines" do
assert_same """
"fo
o"
"""
end
test "with interpolation" do
assert_same ~S["one #{} three"]
assert_same ~S["one #{2} three"]
end
test "with escaped interpolation" do
assert_same ~S["one\#{two}three"]
end
test "with escapes and interpolation" do
assert_same ~S["one\n\"#{2}\"\nthree"]
end
end
describe "charlists" do
test "without escapes" do
assert_same ~S['']
assert_same ~S[' ']
assert_same ~S['foo']
end
test "with escapes" do
assert_same ~S['f\a\b\ro']
assert_same ~S['single \' quote']
end
test "keeps literal new lines" do
assert_same """
'fo
o'
"""
end
test "with interpolation" do
assert_same ~S['one #{2} three']
end
test "with escape and interpolation" do
assert_same ~S['one\n\'#{2}\'\nthree']
end
end
describe "string heredocs" do
test "without escapes" do
assert_same ~S'''
"""
hello
"""
'''
end
test "with escapes" do
assert_same ~S'''
"""
f\a\b\ro
"""
'''
assert_same ~S'''
"""
multiple "\"" quotes
"""
'''
end
test "with interpolation" do
assert_same ~S'''
"""
one
#{2}
three
"""
'''
assert_same ~S'''
"""
one
"
#{2}
"
three
"""
'''
end
test "nested with empty lines" do
assert_same ~S'''
nested do
"""
foo
bar
"""
end
'''
end
test "nested with empty lines and interpolation" do
assert_same ~S'''
nested do
"""
#{foo}
#{bar}
"""
end
'''
assert_same ~S'''
nested do
"""
#{foo}
#{bar}
"""
end
'''
end
test "with escaped new lines" do
assert_same ~S'''
"""
one\
#{"two"}\
three\
"""
'''
end
end
describe "charlist heredocs" do
test "without escapes" do
assert_same ~S"""
'''
hello
'''
"""
end
test "with escapes" do
assert_same ~S"""
'''
f\a\b\ro
'''
"""
assert_same ~S"""
'''
multiple "\"" quotes
'''
"""
end
test "with interpolation" do
assert_same ~S"""
'''
one
#{2}
three
'''
"""
assert_same ~S"""
'''
one
"
#{2}
"
three
'''
"""
end
end
describe "keyword list" do
test "blocks" do
assert_same ~S"""
defmodule Example do
def sample, do: :ok
end
"""
end
test "omitting brackets" do
assert_same ~S"""
@type foo :: a when b: :c
"""
end
test "last tuple element as keyword list keeps its format" do
assert_same ~S"{:wrapped, [opt1: true, opt2: false]}"
assert_same ~S"{:unwrapped, opt1: true, opt2: false}"
assert_same ~S"{:wrapped, 1, [opt1: true, opt2: false]}"
assert_same ~S"{:unwrapped, 1, opt1: true, opt2: false}"
end
test "on module attribute" do
assert_same ~S"""
@foo a: b,
c: d
"""
assert_same ~S"@foo [
a: b,
c: d
]"
end
end
describe "preserves user choice on parenthesis" do
test "in functions with do blocks" do
assert_same(~S"""
foo Bar do
:ok
end
""")
assert_same(~S"""
foo(Bar) do
:ok
end
""")
end
end
describe "preserves formatting for sigils" do
test "without interpolation" do
assert_same ~S[~s(foo)]
assert_same ~S[~s{foo bar}]
assert_same ~S[~r/Bar Baz/]
assert_same ~S[~w<>]
assert_same ~S[~W()]
end
test "with escapes" do
assert_same ~S[~s(foo \) bar)]
assert_same ~S[~s(f\a\b\ro)]
assert_same ~S"""
~S(foo\
bar)
"""
end
test "with nested new lines" do
assert_same ~S"""
foo do
~S(foo\
bar)
end
"""
assert_same ~S"""
foo do
~s(#{bar}
)
end
"""
end
test "with interpolation" do
assert_same ~S[~s(one #{2} three)]
end
test "with modifiers" do
assert_same ~S[~w(one two three)a]
assert_same ~S[~z(one two three)foo]
end
test "with heredoc syntax" do
assert_same ~S"""
~s'''
one\a
#{:two}\r
three\0
'''
"""
assert_same ~S'''
~s"""
one\a
#{:two}\r
three\0
"""
'''
end
test "with heredoc syntax and modifier" do
assert_same ~S"""
~s'''
foo
'''rsa
"""
end
end
describe "preserves comments formatting" do
test "before and after expressions" do
assert_same """
# before comment
:hello
"""
assert_same """
:hello
# after comment
"""
assert_same """
# before comment
:hello
# after comment
"""
end
test "empty comment" do
assert_same """
#
:foo
"""
end
test "handles comments with unescaped literal" do
assert_same """
# before
Mix.install([:foo])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
assert_same """
# before
Mix.install([1 + 2, :foo])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
assert_same """
# before
Mix.install([:foo, 1 + 2])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
end
test "before and after expressions with newlines" do
assert_same """
# before comment
# second line
:hello
# middle comment 1
#
# middle comment 2
:world
# after comment
# second line
"""
end
test "interpolation with comment outside before and after" do
assert_same ~S"""
# comment
IO.puts("Hello #{world}")
"""
assert_same ~S"""
IO.puts("Hello #{world}")
# comment
"""
end
test "blocks with keyword list" do
assert_same ~S"""
defp sample do
[
# comment
{:a, "~> 1.2"}
]
end
"""
assert_same ~S"""
defp sample do
[
# comment
{:a, "~> 1.2"},
{:b, "~> 1.2"}
]
end
"""
end
test "keyword literals with variable values" do
assert_same(~S"""
foo = foo()
[foo: foo]
""")
end
end
end
| 18.04947 | 83 | 0.494323 |
ff42e09e0b379d83a099cb2832dcd14a410d8101 | 1,113 | exs | Elixir | config/config.exs | ddombrow/preston | 13d856106a15b07ae909f4ae3d346d050dd2ff72 | [
"MIT"
] | null | null | null | config/config.exs | ddombrow/preston | 13d856106a15b07ae909f4ae3d346d050dd2ff72 | [
"MIT"
] | null | null | null | config/config.exs | ddombrow/preston | 13d856106a15b07ae909f4ae3d346d050dd2ff72 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :lewis, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:lewis, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.903226 | 73 | 0.750225 |
ff42ff1fac000cc008589c8988c81902c5b292ab | 5,923 | exs | Elixir | test/blog/comp/comp_test.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | 2 | 2017-06-08T23:28:13.000Z | 2017-06-08T23:28:16.000Z | test/blog/comp/comp_test.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | test/blog/comp/comp_test.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | defmodule Blog.CompTest do
use Blog.DataCase
alias Blog.Comp
describe "forms" do
alias Blog.Comp.Form
@valid_attrs %{active: true, info: "some info", title: "some title"}
@update_attrs %{active: false, info: "some updated info", title: "some updated title"}
@invalid_attrs %{active: nil, info: nil, title: nil}
def form_fixture(attrs \\ %{}) do
{:ok, form} =
attrs
|> Enum.into(@valid_attrs)
|> Comp.create_form()
form
end
test "list_forms/0 returns all forms" do
form = form_fixture()
assert Comp.list_forms() == [form]
end
test "get_form!/1 returns the form with given id" do
form = form_fixture()
assert Comp.get_form!(form.id) == form
end
test "create_form/1 with valid data creates a form" do
assert {:ok, %Form{} = form} = Comp.create_form(@valid_attrs)
assert form.active == true
assert form.info == "some info"
assert form.title == "some title"
end
test "create_form/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Comp.create_form(@invalid_attrs)
end
test "update_form/2 with valid data updates the form" do
form = form_fixture()
assert {:ok, form} = Comp.update_form(form, @update_attrs)
assert %Form{} = form
assert form.active == false
assert form.info == "some updated info"
assert form.title == "some updated title"
end
test "update_form/2 with invalid data returns error changeset" do
form = form_fixture()
assert {:error, %Ecto.Changeset{}} = Comp.update_form(form, @invalid_attrs)
assert form == Comp.get_form!(form.id)
end
test "delete_form/1 deletes the form" do
form = form_fixture()
assert {:ok, %Form{}} = Comp.delete_form(form)
assert_raise Ecto.NoResultsError, fn -> Comp.get_form!(form.id) end
end
test "change_form/1 returns a form changeset" do
form = form_fixture()
assert %Ecto.Changeset{} = Comp.change_form(form)
end
end
describe "images" do
alias Blog.Comp.Image
@valid_attrs %{image: "some image", name: "some name"}
@update_attrs %{image: "some updated image", name: "some updated name"}
@invalid_attrs %{image: nil, name: nil}
def image_fixture(attrs \\ %{}) do
{:ok, image} =
attrs
|> Enum.into(@valid_attrs)
|> Comp.create_image()
image
end
test "list_images/0 returns all images" do
image = image_fixture()
assert Comp.list_images() == [image]
end
test "get_image!/1 returns the image with given id" do
image = image_fixture()
assert Comp.get_image!(image.id) == image
end
test "create_image/1 with valid data creates a image" do
assert {:ok, %Image{} = image} = Comp.create_image(@valid_attrs)
assert image.image == "some image"
assert image.name == "some name"
end
test "create_image/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Comp.create_image(@invalid_attrs)
end
test "update_image/2 with valid data updates the image" do
image = image_fixture()
assert {:ok, image} = Comp.update_image(image, @update_attrs)
assert %Image{} = image
assert image.image == "some updated image"
assert image.name == "some updated name"
end
test "update_image/2 with invalid data returns error changeset" do
image = image_fixture()
assert {:error, %Ecto.Changeset{}} = Comp.update_image(image, @invalid_attrs)
assert image == Comp.get_image!(image.id)
end
test "delete_image/1 deletes the image" do
image = image_fixture()
assert {:ok, %Image{}} = Comp.delete_image(image)
assert_raise Ecto.NoResultsError, fn -> Comp.get_image!(image.id) end
end
test "change_image/1 returns a image changeset" do
image = image_fixture()
assert %Ecto.Changeset{} = Comp.change_image(image)
end
end
describe "files" do
alias Blog.Comp.File
@valid_attrs %{file: "some file", name: "some name"}
@update_attrs %{file: "some updated file", name: "some updated name"}
@invalid_attrs %{file: nil, name: nil}
def file_fixture(attrs \\ %{}) do
{:ok, file} =
attrs
|> Enum.into(@valid_attrs)
|> Comp.create_file()
file
end
test "list_files/0 returns all files" do
file = file_fixture()
assert Comp.list_files() == [file]
end
test "get_file!/1 returns the file with given id" do
file = file_fixture()
assert Comp.get_file!(file.id) == file
end
test "create_file/1 with valid data creates a file" do
assert {:ok, %File{} = file} = Comp.create_file(@valid_attrs)
assert file.file == "some file"
assert file.name == "some name"
end
test "create_file/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Comp.create_file(@invalid_attrs)
end
test "update_file/2 with valid data updates the file" do
file = file_fixture()
assert {:ok, file} = Comp.update_file(file, @update_attrs)
assert %File{} = file
assert file.file == "some updated file"
assert file.name == "some updated name"
end
test "update_file/2 with invalid data returns error changeset" do
file = file_fixture()
assert {:error, %Ecto.Changeset{}} = Comp.update_file(file, @invalid_attrs)
assert file == Comp.get_file!(file.id)
end
test "delete_file/1 deletes the file" do
file = file_fixture()
assert {:ok, %File{}} = Comp.delete_file(file)
assert_raise Ecto.NoResultsError, fn -> Comp.get_file!(file.id) end
end
test "change_file/1 returns a file changeset" do
file = file_fixture()
assert %Ecto.Changeset{} = Comp.change_file(file)
end
end
end
| 30.530928 | 90 | 0.637852 |
ff433400a3f761017e5c4ebd1d126bc74759dccc | 3,252 | ex | Elixir | apps/ewallet_config/lib/ewallet_config/validator.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_config/lib/ewallet_config/validator.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_config/lib/ewallet_config/validator.ex | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletConfig.Validator do
  @moduledoc """
  Custom validators that extend Ecto.Changeset's list of built-in validators.
  """
  alias Ecto.Changeset

  @doc """
  Validates that exactly one of the provided fields has a value.

  Adds an `:only_one_required` error when more than one field is present,
  and a `:required_exclusive` error when none are.
  """
  def validate_required_exclusive(changeset, attrs) when is_map(attrs) or is_list(attrs) do
    case count_fields_present(changeset, attrs) do
      1 ->
        changeset

      n when n > 1 ->
        Changeset.add_error(
          changeset,
          attrs,
          "only one must be present",
          validation: :only_one_required
        )

      _ ->
        Changeset.add_error(
          changeset,
          attrs,
          "can't all be blank",
          validation: :required_exclusive
        )
    end
  end

  @doc """
  Validates that either all or none of the given fields are present.
  """
  def validate_required_all_or_none(changeset, attrs) do
    num_attrs = Enum.count(attrs)
    missing_attrs = Enum.filter(attrs, fn attr -> !field_present?(changeset, attr) end)

    # 0 missing -> all present; all missing -> none present; both are valid.
    # NOTE(review): the "all_or_none" validation tag is a string while the
    # other validators use atoms — confirm before normalizing, since error
    # consumers may match on it.
    case Enum.count(missing_attrs) do
      0 ->
        changeset

      ^num_attrs ->
        changeset

      _ ->
        Changeset.add_error(
          changeset,
          attrs,
          "either all or none of them must be present",
          validation: "all_or_none"
        )
    end
  end

  @doc """
  Validates that at most one of the provided fields has a value; unlike
  `validate_required_exclusive/2`, all of them may be blank.
  """
  def validate_exclusive(changeset, attrs) when is_map(attrs) or is_list(attrs) do
    case count_fields_present(changeset, attrs) do
      n when n > 1 ->
        Changeset.add_error(
          changeset,
          attrs,
          "only one must be present",
          validation: :only_one_required
        )

      _ ->
        changeset
    end
  end

  @doc """
  Validates that the value cannot be changed after it has been set.

  A change is allowed when the stored value is `nil` (never set) or when
  the new value equals the stored one; anything else adds a
  "can't be changed" error.
  """
  def validate_immutable(changeset, key) do
    changed = Changeset.get_field(changeset, key)

    case Map.get(changeset.data, key) do
      nil -> changeset
      ^changed -> changeset
      _ -> Changeset.add_error(changeset, key, "can't be changed")
    end
  end

  @doc """
  Counts how many of `attrs` are present (non-nil, non-empty) in the
  changeset.
  """
  def count_fields_present(changeset, attrs) do
    Enum.count(attrs, fn attr -> field_present?(changeset, attr) end)
  end

  @doc """
  Returns a truthy value when the field is present in the changeset.

  Accepts a bare field name, or a `{field, expected_value}` tuple in which
  case the field must also equal `expected_value`; a `nil` expected value
  falls back to the bare-field check.
  """
  def field_present?(changeset, attr) when is_atom(attr) do
    value = Changeset.get_field(changeset, attr)
    # Truthy-and chain: may return nil/false/"" rather than a strict
    # boolean, which is acceptable for the Enum.count predicate above.
    value && value != ""
  end

  def field_present?(changeset, {attr, nil}), do: field_present?(changeset, attr)

  def field_present?(changeset, {attr, attr_value}) do
    value = Changeset.get_field(changeset, attr)
    value && value != "" && value == attr_value
  end
end
| 27.327731 | 91 | 0.652214 |
ff433579680453384675024d3d1c669f3d27c784 | 8,053 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/62.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/62.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/62.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
pnoun pnoun pnoun cm cm;
pnoun nst particle adjective cm;
cm pnoun noun cm cm;
cm cardinal cardinal noun cm;
pn noun noun verb cm;
adjective noun adjective particle cm;
demonstrative noun adjective noun cm;
cardinal cardinal particle quantifier cm;
pnoun noun noun cm cm;
cm noun quantifier adjective cm;
cm noun adjective verb cm;
pn pnoun noun verb cm;
quantifier noun noun verb cm;
adjective noun particle adjective cm;
pn nst pnoun cm cm;
quantifier noun verb verb_aux cm;
conj adverb noun verb cm;
cm noun neg adjective cm;
conj noun noun verb cm;
pn noun cm verb cm;
pn noun adjective cardinal cm;
cardinal noun noun particle cm;
pn noun noun verb cm;
cm nst pn adjective cm;
cm pnoun adjective noun cm;
cm cm pn noun cm;
pn noun adverb conj cm;
demonstrative noun adjective noun cm;
cm quantifier quantifier adverb particle;
noun cm cardinal noun cm;
adjective noun noun cm cm;
cm cardinal quantifier noun cm;
cm pnoun adjective verb cm;
verb noun verb verb_aux cm;
cm pn adjective pnoun cm;
particle noun verb verb_aux cm;
demonstrative noun adjective neg cm;
noun cm cardinal pnoun cm;
particle noun nst pn cm;
quantifier noun pnoun conj cm;
cm noun pn verb cm;
cm nst cm verb cm;
cm pnoun particle noun cm;
adjective pnoun demonstrative noun cm;
noun nst verb SYM cm;
cm verb verb verb_aux cm;
demonstrative noun adjective quantifier cm;
adjective quantifier quantifier noun particle;
noun pnoun verb pn cm;
pn pnoun verb SYM cm;
ordinal pnoun verb pn cm;
conj pnoun verb SYM cm;
noun noun verb pn cm;
pn pnoun verb SYM cm;
demonstrative noun noun noun cm;
adjective noun adjective verb cm;
cm noun verb verb_aux cm;
cardinal noun pnoun conj cm;
noun noun noun pnoun cm;
pn noun quantifier particle cm;
cm quantifier noun pn particle;
pn verb verb verb_aux cm;
demonstrative noun pn cm cm;
pnoun pnoun pn cm cm;
adjective verb noun cm cm;
pnoun cm noun noun cm;
adjective noun noun verb cm;
cm noun pnoun cm cm;
noun noun pnoun cm cm;
cm noun pnoun cm cm;
quantifier noun verb neg cm;
cardinal pnoun noun cm cm;
cardinal noun particle quantifier cm;
quantifier noun adjective verb cm;
pn noun quantifier noun cm;
demonstrative noun cardinal noun cm;
cardinal noun verb verb_aux cm;
pnoun pnoun noun cm cm;
adjective noun noun cm cm;
adjective noun adjective adjective cm;
pn pnoun noun cm cm;
pnoun pnoun noun pnoun cm;
cm noun adjective noun cm;
cardinal noun particle adjective cm;
pnoun pnoun verb SYM cm;
pnoun pnoun verb verb_aux cm;
noun cm quantifier adjective cm;
pnoun pnoun noun verb cm;
cm adverb adverb conj particle;
adjective noun noun cm cm;
cardinal noun pnoun cm cm;
noun noun pnoun noun cm;
demonstrative noun adjective noun cm;
pnoun pnoun verb verb_aux cm;
adjective noun verb verb_aux cm;
pnoun pnoun adjective noun cm;
cm nst particle pnoun cm;
pnoun noun nst verb cm;
SYM noun verb verb_aux cm;
quantifier noun nst verb cm;
cm quantifier quantifier cardinal cm;
conj cardinal quantifier adjective cm;
adjective noun noun verb cm;
adjective noun noun verb cm;
noun verb noun verb cm;
adjective noun verb verb_aux cm;
nst noun noun cm cm;
cardinal noun quantifier noun cm;
conj pn quantifier noun cm;
adjective noun pnoun conj cm;
cm noun verb verb_aux cm;
SYM pnoun pnoun pnoun cm;
demonstrative noun noun cm cm;
cardinal noun quantifier cm cm;
cm noun verb SYM cm;
noun noun particle noun cm;
cm noun verb verb_aux cm;
adjective noun noun cm cm;
adjective noun verb verb_aux cm;
adjective noun adjective verb cm;
cm pnoun adjective noun cm;
nst verb verb cm cm;
demonstrative noun nst verb cm;
ordinal noun particle noun cm;
cm noun noun cm cm;
adjective verb adjective verb cm;
pn noun quantifier noun cm;
cm noun noun adjective cm;
verb_aux quantifier quantifier cardinal particle;
cardinal cardinal quantifier noun particle;
noun pnoun pnoun verb cm;
SYM pn quantifier noun cm;
conj pnoun adjective verb cm;
SYM noun adjective pnoun cm;
cm cardinal quantifier noun cm;
SYM pn quantifier noun cm;
cm noun verb pnoun cm;
noun pnoun pnoun noun cm;
cm noun quantifier adjective cm;
cm cardinal particle quantifier cm;
cm noun quantifier noun cm;
noun pnoun adverb cardinal cm;
cm cardinal particle quantifier cm;
cm noun quantifier noun cm;
pnoun noun nst pn cm;
cm nst pn cm cm;
SYM cardinal noun verb cm;
SYM cardinal noun adjective cm;
noun pnoun noun verb cm;
pn noun particle verb cm;
cardinal noun quantifier pnoun cm;
nst pnoun pnoun cm cm;
cm noun demonstrative noun cm;
pn noun verb verb_aux cm;
particle noun verb verb cm;
conj noun noun noun cm;
adjective noun verb neg cm;
SYM pn pn quantifier cm;
noun particle adjective noun cm;
pnoun pnoun verb verb cm;
cardinal cardinal particle quantifier cm;
demonstrative noun verb verb cm;
pn noun verb noun cm;
conj pn cardinal noun cm;
verb pnoun cardinal pnoun cm;
cardinal cardinal quantifier noun cm;
noun cm verb noun cm;
verb noun adjective noun cm;
cardinal noun adjective verb cm;
cm nst verb noun cm;
demonstrative noun verb verb_aux cm;
pn noun verb conj cm;
pn noun pn noun cm;
adjective noun pn noun cm;
pn noun nst quantifier cm;
pnoun pnoun noun verb cm;
demonstrative noun quantifier adjective cm;
cm cardinal particle verb cm;
noun pnoun noun pnoun cm;
pnoun pnoun noun pnoun cm;
conj pnoun noun verb cm;
pnoun pnoun verb verb_aux cm;
demonstrative noun adjective noun cm;
cardinal noun noun verb cm;
pnoun pnoun noun cm cm;
cm noun noun verb cm;
pn pnoun adjective noun cm;
SYM noun verb noun cm;
noun noun noun cm cm;
adjective noun noun conj cm;
adjective verb noun verb cm;
cm verb noun verb cm;
cm noun verb verb_aux cm;
pnoun pnoun noun cm cm;
cm pnoun pn noun cm;
particle noun adjective verb cm;
adjective noun noun neg cm;
quantifier noun pnoun cm cm;
noun pnoun verb verb_aux cm;
pnoun adverb verb verb_aux cm;
SYM pnoun noun noun cm;
cardinal pnoun adjective verb cm;
pnoun pnoun noun verb cm;
quantifier noun pnoun cm cm;
conj pnoun pnoun verb cm;
noun pnoun pnoun verb cm;
cardinal pnoun adjective verb cm;
demonstrative noun pn cm cm;
cm noun pnoun noun cm;
cm adverb verb SYM cm;
cm verb noun verb cm;
adjective noun pnoun pnoun cm;
pnoun pnoun cm adjective cm;
adjective noun pn verb cm;
demonstrative noun cm adjective cm;
pn noun noun verb cm;
cm quantifier quantifier cardinal particle;
SYM quantifier noun pnoun particle;
noun verb noun verb cm;
cm pnoun pn noun cm;
cm noun noun cm cm;
conj noun particle verb cm;
noun verb noun verb cm;
adjective noun demonstrative noun cm;
nst noun noun cm cm;
pn noun verb cm cm;
pnoun noun verb verb_aux cm;
demonstrative noun noun verb cm;
noun noun verb SYM cm;
pnoun pnoun noun verb cm;
cm nst particle pnoun cm;
cm nst noun cm cm;
demonstrative noun noun cm cm;
noun pnoun noun verb cm;
pnoun pnoun noun verb cm;
pnoun pnoun noun verb cm;
adjective noun noun verb cm;
cm noun noun verb cm;
quantifier noun adjective verb cm;
noun pnoun verb verb_aux cm;
SYM cardinal nst verb cm;
cm cm noun noun cm;
cm pnoun pnoun cm cm;
adjective noun demonstrative noun cm;
noun noun particle adjective cm;
adjective noun verb verb_aux cm;
conj noun verb verb_aux cm;
pn cm noun neg cm;
pnoun cm verb verb_aux cm;
cm cm verb noun cm;
noun adverb noun verb particle;
cm noun verb verb_aux cm;
particle noun verb verb cm;
noun particle noun noun cm;
cm cm noun verb cm;
cm nst pn noun cm;
cm pnoun noun verb cm;
adjective noun verb cm cm;
adverb noun noun verb cm;
pn verb verb verb_aux cm;
adjective noun noun verb cm;
cm noun noun cm cm;
cm noun verb SYM cm;
SYM pn cardinal verb cm;
SYM pn noun cm cm;
cm noun SYM pnoun cm;
cm noun verb pn cm;
cm noun noun verb cm;
SYM noun demonstrative noun cm;
noun noun verb pn cm;
cm nst pnoun cm cm;
cm pnoun verb verb_aux cm;
adjective noun pn noun cm;
conj quantifier quantifier cardinal particle;
demonstrative noun verb verb_aux cm;
cm cm demonstrative noun cm;
| 28.863799 | 50 | 0.75922 |
ff433b8db7dcef7fa4f28436cfb59cb41ef91296 | 198 | ex | Elixir | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/InOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/InOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/bracket_operation_parsing_test_case/InOperator.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | Module.in[key: value]
Module.in [key: value]
Module.in[()]
Module.in [()]
Module.in[matched_expression]
Module.in [matched_expression]
Module.in[matched_expression,]
Module.in [matched_expression,]
| 22 | 31 | 0.767677 |
ff433da3741772be2254b63d845aa80a4bbe73e0 | 1,602 | ex | Elixir | clients/home_graph/lib/google_api/home_graph/v1/model/report_state_and_notification_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/home_graph/lib/google_api/home_graph/v1/model/report_state_and_notification_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/home_graph/lib/google_api/home_graph/v1/model/report_state_and_notification_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationResponse do
@moduledoc """
Response type for the
[`ReportStateAndNotification`](#google.home.graph.v1.HomeGraphApiService.ReportStateAndNotification)
call.
## Attributes
* `requestId` (*type:* `String.t`, *default:* `nil`) - Request ID copied from ReportStateAndNotificationRequest.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:requestId => String.t()
}
field(:requestId)
end
defimpl Poison.Decoder, for: GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationResponse do
def decode(value, options) do
GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HomeGraph.V1.Model.ReportStateAndNotificationResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.693878 | 116 | 0.759051 |
ff434256ddb38b70120a8e2a9a398b65ce348622 | 2,951 | ex | Elixir | lib/espec/example.ex | edwinthinks/espec | 4f97ef75e482d61e8a27d7f98a20f53220a08532 | [
"Apache-2.0"
] | null | null | null | lib/espec/example.ex | edwinthinks/espec | 4f97ef75e482d61e8a27d7f98a20f53220a08532 | [
"Apache-2.0"
] | null | null | null | lib/espec/example.ex | edwinthinks/espec | 4f97ef75e482d61e8a27d7f98a20f53220a08532 | [
"Apache-2.0"
] | null | null | null | defmodule ESpec.Example do
@moduledoc """
Defines macros 'example' and 'it'.
These macros defines function with random name which will be called when example runs.
Example structs %ESpec.Example are accumulated in @examples attribute
"""
@doc """
Example struct.
description - the description of example,
module - spec module,
function - random function name,
opts - options,
file - spec file path,
line - the line where example is defined,
context - example context. Accumulator for 'contexts' and 'lets',
shared - marks example as shared,
status - example status (:new, :success, :failure, :pending),
result - the value returned by example block or the pending message,
error - store an error,
duration - test duration.
"""
defstruct description: "",
module: nil,
function: nil,
opts: [],
file: nil,
line: nil,
context: [],
shared: false,
status: :new,
result: nil,
error: nil,
duration: 0
@doc "Context descriptions."
def context_descriptions(example) do
example
|> extract_contexts
|> Enum.map(& &1.description)
end
@doc "Filters success examples."
def success(results), do: Enum.filter(results, &(&1.status == :success))
@doc "Filters failed examples."
def failure(results), do: Enum.filter(results, &(&1.status === :failure))
@doc "Filters pending examples."
def pendings(results), do: Enum.filter(results, &(&1.status === :pending))
@doc "Extracts specific structs from example context."
def extract_befores(example), do: extract(example.context, ESpec.Before)
def extract_lets(example), do: extract(example.context, ESpec.Let)
def extract_finallies(example), do: extract(example.context, ESpec.Finally)
def extract_contexts(example), do: extract(example.context, ESpec.Context)
@doc "Extracts example option."
def extract_option(example, option) do
opts = extract_options(example)
opt = Enum.find(opts, fn {k, _v} -> k == option end)
if opt do
{^option, value} = opt
value
else
nil
end
end
@doc "Extracts example options."
def extract_options(example) do
contexts = ESpec.Example.extract_contexts(example)
List.flatten(example.opts ++ Enum.reverse(Enum.map(contexts, & &1.opts)))
end
def extract(context, module) do
Enum.filter(context, &(&1.__struct__ == module))
end
@doc "Message for skipped examples."
def skip_message(example) do
skipper = extract_option(example, :skip)
if skipper === true do
"Temporarily skipped without a reason."
else
"Temporarily skipped with: #{skipper}."
end
end
@doc "Message for pending examples."
def pending_message(example) do
if example.opts[:pending] === true do
"Pending example."
else
"Pending with message: #{example.opts[:pending]}."
end
end
end
| 29.217822 | 88 | 0.658082 |
ff437c9c54cd4e6210d94ee3362496c93cc04e05 | 39,135 | exs | Elixir | lib/elixir/test/elixir/string_test.exs | tnascimento/elixir | 9a4d10e702f33d2fa47718cde05375b506b4a3d6 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | tnascimento/elixir | 9a4d10e702f33d2fa47718cde05375b506b4a3d6 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/string_test.exs | tnascimento/elixir | 9a4d10e702f33d2fa47718cde05375b506b4a3d6 | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
defmodule StringTest do
use ExUnit.Case, async: true
doctest String
test "next_codepoint/1" do
assert String.next_codepoint("ésoj") == {"é", "soj"}
assert String.next_codepoint(<<255>>) == {<<255>>, ""}
assert String.next_codepoint("") == nil
end
# test cases described in https://mortoray.com/2013/11/27/the-string-type-is-broken/
test "Unicode" do
  assert String.reverse("noël") == "lëon"
  assert String.slice("noël", 0..2) == "noë"
  assert String.length("noël") == 4

  # Astral-plane (non-BMP) codepoints: each emoji is a single grapheme.
  # NOTE(review): these literals were mangled to empty strings, which made
  # `String.length("") == 2` a failing assertion; restored the emoji operands.
  assert String.length("😸😾") == 2
  assert String.slice("😸😾", 1..1) == "😾"
  assert String.reverse("😸😾") == "😾😸"

  assert String.upcase("baffle") == "BAFFLE"

  assert String.equivalent?("noël", "noël")
end
test "split/1,2,3" do
  assert String.split("") == []
  assert String.split("foo bar") == ["foo", "bar"]
  assert String.split(" foo bar") == ["foo", "bar"]
  assert String.split("foo bar ") == ["foo", "bar"]
  assert String.split(" foo bar ") == ["foo", "bar"]
  assert String.split("foo\t\n\v\f\r\sbar\n") == ["foo", "bar"]
  assert String.split("foo" <> <<194, 133>> <> "bar") == ["foo", "bar"]
  # information separators are not considered whitespace
  assert String.split("foo\u001Fbar") == ["foo\u001Fbar"]
  # no-break space is excluded
  # NOTE(review): was "foo\00A0bar" (NUL + "A0"), which never exercised the
  # no-break space; fixed to \u00A0 to match the \u202F case below.
  assert String.split("foo\u00A0bar") == ["foo\u00A0bar"]
  assert String.split("foo\u202Fbar") == ["foo\u202Fbar"]
  assert String.split("a,b,c", ",") == ["a", "b", "c"]
  assert String.split("a,b", ".") == ["a,b"]
  assert String.split("1,2 3,4", [" ", ","]) == ["1", "2", "3", "4"]
  assert String.split("", ",") == [""]
  assert String.split(" a b c ", " ") == ["", "a", "b", "c", ""]
  assert String.split(" a b c ", " ", parts: :infinity) == ["", "a", "b", "c", ""]
  assert String.split(" a b c ", " ", parts: 1) == [" a b c "]
  assert String.split(" a b c ", " ", parts: 2) == ["", "a b c "]
  assert String.split("", ",", trim: true) == []
  assert String.split(" a b c ", " ", trim: true) == ["a", "b", "c"]
  assert String.split(" a b c ", " ", trim: true, parts: :infinity) == ["a", "b", "c"]
  assert String.split(" a b c ", " ", trim: true, parts: 1) == [" a b c "]
  assert String.split(" a b c ", " ", trim: true, parts: 2) == ["a", "b c "]
  assert String.split("abé", "") == ["", "a", "b", "é", ""]
  assert String.split("abé", "", parts: :infinity) == ["", "a", "b", "é", ""]
  assert String.split("abé", "", parts: 1) == ["abé"]
  assert String.split("abé", "", parts: 2) == ["", "abé"]
  assert String.split("abé", "", parts: 3) == ["", "a", "bé"]
  assert String.split("abé", "", parts: 4) == ["", "a", "b", "é"]
  assert String.split("abé", "", parts: 5) == ["", "a", "b", "é", ""]
  assert String.split("abé", "", parts: 10) == ["", "a", "b", "é", ""]
  assert String.split("abé", "", trim: true) == ["a", "b", "é"]
  assert String.split("abé", "", trim: true, parts: :infinity) == ["a", "b", "é"]
  assert String.split("abé", "", trim: true, parts: 2) == ["a", "bé"]
  assert String.split("abé", "", trim: true, parts: 3) == ["a", "b", "é"]
  assert String.split("abé", "", trim: true, parts: 4) == ["a", "b", "é"]
  assert String.split("noël", "") == ["", "n", "o", "ë", "l", ""]
  assert String.split("x-", "-", parts: 2, trim: true) == ["x"]
  assert String.split("x-x-", "-", parts: 3, trim: true) == ["x", "x"]
  assert String.split("hello", []) == ["hello"]
  assert String.split("hello", [], trim: true) == ["hello"]
  assert String.split("", []) == [""]
  assert String.split("", [], trim: true) == []

  assert_raise ArgumentError, fn ->
    String.split("a,b,c", [""])
  end

  assert_raise ArgumentError, fn ->
    String.split("a,b,c", [""])
  end
end
test "split/2,3 with regex" do
assert String.split("", ~r{,}) == [""]
assert String.split("", ~r{,}, trim: true) == []
assert String.split("a,b", ~r{,}) == ["a", "b"]
assert String.split("a,b,c", ~r{,}) == ["a", "b", "c"]
assert String.split("a,b,c", ~r{,}, parts: 2) == ["a", "b,c"]
assert String.split("a,b.c ", ~r{\W}) == ["a", "b", "c", ""]
assert String.split("a,b.c ", ~r{\W}, trim: false) == ["a", "b", "c", ""]
assert String.split("a,b", ~r{\.}) == ["a,b"]
end
test "split/2,3 with compiled pattern" do
pattern = :binary.compile_pattern("-")
assert String.split("x-", pattern) == ["x", ""]
assert String.split("x-", pattern, parts: 2, trim: true) == ["x"]
assert String.split("x-x-", pattern, parts: 3, trim: true) == ["x", "x"]
end
test "splitter/2,3" do
assert String.splitter("a,b,c", ",") |> Enum.to_list() == ["a", "b", "c"]
assert String.splitter("a,b", ".") |> Enum.to_list() == ["a,b"]
assert String.splitter("1,2 3,4", [" ", ","]) |> Enum.to_list() == ["1", "2", "3", "4"]
assert String.splitter("", ",") |> Enum.to_list() == [""]
assert String.splitter("", ",", trim: true) |> Enum.to_list() == []
assert String.splitter(" a b c ", " ", trim: true) |> Enum.to_list() == ["a", "b", "c"]
assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(1) == ["a"]
assert String.splitter(" a b c ", " ", trim: true) |> Enum.take(2) == ["a", "b"]
assert String.splitter("hello", []) |> Enum.to_list() == ["hello"]
assert String.splitter("hello", [], trim: true) |> Enum.to_list() == ["hello"]
assert String.splitter("", []) |> Enum.to_list() == [""]
assert String.splitter("", [], trim: true) |> Enum.to_list() == []
assert String.splitter("1,2 3,4 5", "") |> Enum.take(4) == ["", "1", ",", "2"]
assert_raise ArgumentError, fn ->
String.splitter("a", [""])
end
end
test "split_at/2" do
assert String.split_at("", 0) == {"", ""}
assert String.split_at("", -1) == {"", ""}
assert String.split_at("", 1) == {"", ""}
assert String.split_at("abc", 0) == {"", "abc"}
assert String.split_at("abc", 2) == {"ab", "c"}
assert String.split_at("abc", 3) == {"abc", ""}
assert String.split_at("abc", 4) == {"abc", ""}
assert String.split_at("abc", 1000) == {"abc", ""}
assert String.split_at("abc", -1) == {"ab", "c"}
assert String.split_at("abc", -3) == {"", "abc"}
assert String.split_at("abc", -4) == {"", "abc"}
assert String.split_at("abc", -1000) == {"", "abc"}
assert_raise FunctionClauseError, fn ->
String.split_at("abc", 0.1)
end
assert_raise FunctionClauseError, fn ->
String.split_at("abc", -0.1)
end
end
test "split_at/2 with invalid guard" do
assert String.split_at(<<?a, 195, 10, ?a>>, 2) == {<<?a, 195>>, <<10, ?a>>}
assert String.split_at(<<107, 205, 135, 184>>, 1) == {<<107, 205, 135>>, <<184>>}
end
test "upcase/1" do
assert String.upcase("123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz") ==
"123 ABCD 456 EFG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ"
assert String.upcase("") == ""
assert String.upcase("abcD") == "ABCD"
end
test "upcase/1 with UTF-8" do
assert String.upcase("& % # àáâ ãäå 1 2 ç æ") == "& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ"
assert String.upcase("àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ") == "ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ"
end
test "upcase/1 with UTF-8 multibyte" do
assert String.upcase("straße") == "STRASSE"
assert String.upcase("áüÈß") == "ÁÜÈSS"
end
test "upcase/1 with ascii" do
assert String.upcase("olá", :ascii) == "OLá"
end
test "upcase/1 with turkic" do
assert String.upcase("ıi", :turkic) == "Iİ"
assert String.upcase("Iİ", :turkic) == "Iİ"
end
test "downcase/1" do
assert String.downcase("123 ABcD 456 EfG HIJ ( %$#) KL MNOP @ QRST = -_ UVWXYZ") ==
"123 abcd 456 efg hij ( %$#) kl mnop @ qrst = -_ uvwxyz"
assert String.downcase("abcD") == "abcd"
assert String.downcase("") == ""
end
test "downcase/1 with UTF-8" do
assert String.downcase("& % # ÀÁÂ ÃÄÅ 1 2 Ç Æ") == "& % # àáâ ãäå 1 2 ç æ"
assert String.downcase("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ") == "àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ"
assert String.downcase("áüÈß") == "áüèß"
end
test "downcase/1 with greek final sigma" do
assert String.downcase("Σ") == "σ"
assert String.downcase("ΣΣ") == "σσ"
assert String.downcase("Σ ΣΣ") == "σ σσ"
assert String.downcase("ΜΕΣ'ΑΠΟ") == "μεσ'απο"
assert String.downcase("ΑΣ'ΤΟΥΣ") == "ασ'τουσ"
assert String.downcase("Σ", :greek) == "σ"
assert String.downcase("Σ ΣΣ", :greek) == "σ σς"
assert String.downcase("Σ ΣΑΣ Σ", :greek) == "σ σας σ"
assert String.downcase("ΜΕΣ'ΑΠΟ", :greek) == "μεσ'απο"
assert String.downcase("ΑΣ'ΤΟΥΣ", :greek) == "ασ'τους"
end
test "downcase/1 with ascii" do
assert String.downcase("OLÁ", :ascii) == "olÁ"
end
test "downcase/1 with turkic" do
assert String.downcase("Iİ", :turkic) == "ıi"
assert String.downcase("İ", :turkic) == "i"
assert String.downcase("ıi", :turkic) == "ıi"
assert String.downcase("i", :turkic) == "i"
assert String.downcase("İ") == "i̇"
end
test "capitalize/1" do
assert String.capitalize("") == ""
assert String.capitalize("abc") == "Abc"
assert String.capitalize("ABC") == "Abc"
assert String.capitalize("c b a") == "C b a"
assert String.capitalize("1ABC") == "1abc"
assert String.capitalize("_aBc1") == "_abc1"
assert String.capitalize(" aBc1") == " abc1"
end
test "capitalize/1 with UTF-8" do
assert String.capitalize("àáâ") == "Àáâ"
assert String.capitalize("ÀÁÂ") == "Àáâ"
assert String.capitalize("âáà") == "Âáà"
assert String.capitalize("ÂÁÀ") == "Âáà"
assert String.capitalize("òóôõö") == "Òóôõö"
assert String.capitalize("ÒÓÔÕÖ") == "Òóôõö"
assert String.capitalize("fin") == "Fin"
end
test "capitalize/1 with ascii" do
assert String.capitalize("àáâ", :ascii) == "àáâ"
assert String.capitalize("aáA", :ascii) == "Aáa"
end
test "capitalize/1 with turkic" do
assert String.capitalize("iii", :turkic) == "İii"
assert String.capitalize("ııı", :turkic) == "Iıı"
assert String.capitalize("İii", :turkic) == "İii"
assert String.capitalize("Iıı", :turkic) == "Iıı"
end
test "replace_leading/3" do
assert String.replace_leading("aa abc ", "a", "b") == "bb abc "
assert String.replace_leading("__ abc ", "_", "b") == "bb abc "
assert String.replace_leading("aaaaaaaa ", "a", "b") == "bbbbbbbb "
assert String.replace_leading("aaaaaaaa ", "aaa", "b") == "bbaa "
assert String.replace_leading("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
assert String.replace_leading("]]]]]]", "]", "[]") == "[][][][][][]"
assert String.replace_leading("]]]]]]]]", "]", "") == ""
assert String.replace_leading("]]]]]] ]", "]", "") == " ]"
assert String.replace_leading("猫猫 cat ", "猫", "й") == "йй cat "
assert String.replace_leading("test", "t", "T") == "Test"
assert String.replace_leading("t", "t", "T") == "T"
assert String.replace_leading("aaa", "b", "c") == "aaa"
message = ~r/cannot use an empty string/
assert_raise ArgumentError, message, fn ->
String.replace_leading("foo", "", "bar")
end
assert_raise ArgumentError, message, fn ->
String.replace_leading("", "", "bar")
end
end
test "replace_trailing/3" do
assert String.replace_trailing(" abc aa", "a", "b") == " abc bb"
assert String.replace_trailing(" abc __", "_", "b") == " abc bb"
assert String.replace_trailing(" aaaaaaaa", "a", "b") == " bbbbbbbb"
assert String.replace_trailing(" aaaaaaaa", "aaa", "b") == " aabb"
assert String.replace_trailing("aaaaaaaaa", "a", "b") == "bbbbbbbbb"
assert String.replace_trailing("]]]]]]", "]", "[]") == "[][][][][][]"
assert String.replace_trailing("]]]]]]]]", "]", "") == ""
assert String.replace_trailing("] ]]]]]]", "]", "") == "] "
assert String.replace_trailing(" cat 猫猫", "猫", "й") == " cat йй"
assert String.replace_trailing("test", "t", "T") == "tesT"
assert String.replace_trailing("t", "t", "T") == "T"
assert String.replace_trailing("aaa", "b", "c") == "aaa"
message = ~r/cannot use an empty string/
assert_raise ArgumentError, message, fn ->
String.replace_trailing("foo", "", "bar")
end
assert_raise ArgumentError, message, fn ->
String.replace_trailing("", "", "bar")
end
end
test "trim/1,2" do
assert String.trim("") == ""
assert String.trim(" abc ") == "abc"
assert String.trim("a abc a\n\n") == "a abc a"
assert String.trim("a abc a\t\n\v\f\r\s") == "a abc a"
assert String.trim("___ abc ___", "_") == " abc "
assert String.trim("猫猫猫cat猫猫猫", "猫猫") == "猫cat猫"
# no-break space
assert String.trim("\u00A0a abc a\u00A0") == "a abc a"
# whitespace defined as a range
assert String.trim("\u2008a abc a\u2005") == "a abc a"
end
test "trim_leading/1,2" do
assert String.trim_leading("") == ""
assert String.trim_leading(" abc ") == "abc "
assert String.trim_leading("a abc a") == "a abc a"
assert String.trim_leading("\n\na abc a") == "a abc a"
assert String.trim_leading("\t\n\v\f\r\sa abc a") == "a abc a"
assert String.trim_leading(<<194, 133, "a abc a">>) == "a abc a"
# information separators are not whitespace
assert String.trim_leading("\u001F a abc a") == "\u001F a abc a"
# no-break space
assert String.trim_leading("\u00A0 a abc a") == "a abc a"
assert String.trim_leading("aa aaa", "aaa") == "aa aaa"
assert String.trim_leading("aaa aaa", "aa") == "a aaa"
assert String.trim_leading("aa abc ", "a") == " abc "
assert String.trim_leading("__ abc ", "_") == " abc "
assert String.trim_leading("aaaaaaaaa ", "a") == " "
assert String.trim_leading("aaaaaaaaaa", "a") == ""
assert String.trim_leading("]]]]]] ]", "]") == " ]"
assert String.trim_leading("猫猫 cat ", "猫") == " cat "
assert String.trim_leading("test", "t") == "est"
assert String.trim_leading("t", "t") == ""
assert String.trim_leading("", "t") == ""
end
test "trim_trailing/1,2" do
assert String.trim_trailing("") == ""
assert String.trim_trailing("1\n") == "1"
assert String.trim_trailing("\r\n") == ""
assert String.trim_trailing(" abc ") == " abc"
assert String.trim_trailing(" abc a") == " abc a"
assert String.trim_trailing("a abc a\n\n") == "a abc a"
assert String.trim_trailing("a abc a\t\n\v\f\r\s") == "a abc a"
assert String.trim_trailing(<<"a abc a", 194, 133>>) == "a abc a"
# information separators are not whitespace
assert String.trim_trailing("a abc a \u001F") == "a abc a \u001F"
# no-break space
assert String.trim_trailing("a abc a \u00A0") == "a abc a"
assert String.trim_trailing("aaa aa", "aaa") == "aaa aa"
assert String.trim_trailing("aaa aaa", "aa") == "aaa a"
assert String.trim_trailing(" abc aa", "a") == " abc "
assert String.trim_trailing(" abc __", "_") == " abc "
assert String.trim_trailing(" aaaaaaaaa", "a") == " "
assert String.trim_trailing("aaaaaaaaaa", "a") == ""
assert String.trim_trailing("] ]]]]]]", "]") == "] "
assert String.trim_trailing(" cat 猫猫", "猫") == " cat "
assert String.trim_trailing("test", "t") == "tes"
assert String.trim_trailing("t", "t") == ""
assert String.trim_trailing("", "t") == ""
end
test "pad_leading/2,3" do
assert String.pad_leading("", 5) == " "
assert String.pad_leading("abc", 5) == " abc"
assert String.pad_leading(" abc ", 9) == " abc "
assert String.pad_leading("猫", 5) == " 猫"
assert String.pad_leading("-", 0) == "-"
assert String.pad_leading("-", 1) == "-"
assert String.pad_leading("---", 5, "abc") == "ab---"
assert String.pad_leading("---", 9, "abc") == "abcabc---"
assert String.pad_leading("---", 5, ["abc"]) == "abcabc---"
assert String.pad_leading("--", 6, ["a", "bc"]) == "abcabc--"
assert_raise FunctionClauseError, fn ->
String.pad_leading("-", -1)
end
assert_raise FunctionClauseError, fn ->
String.pad_leading("-", 1, [])
end
message = "expected a string padding element, got: 10"
assert_raise ArgumentError, message, fn ->
String.pad_leading("-", 3, ["-", 10])
end
end
test "pad_trailing/2,3" do
assert String.pad_trailing("", 5) == " "
assert String.pad_trailing("abc", 5) == "abc "
assert String.pad_trailing(" abc ", 9) == " abc "
assert String.pad_trailing("猫", 5) == "猫 "
assert String.pad_trailing("-", 0) == "-"
assert String.pad_trailing("-", 1) == "-"
assert String.pad_trailing("---", 5, "abc") == "---ab"
assert String.pad_trailing("---", 9, "abc") == "---abcabc"
assert String.pad_trailing("---", 5, ["abc"]) == "---abcabc"
assert String.pad_trailing("--", 6, ["a", "bc"]) == "--abcabc"
assert_raise FunctionClauseError, fn ->
String.pad_trailing("-", -1)
end
assert_raise FunctionClauseError, fn ->
String.pad_trailing("-", 1, [])
end
message = "expected a string padding element, got: 10"
assert_raise ArgumentError, message, fn ->
String.pad_trailing("-", 3, ["-", 10])
end
end
test "reverse/1" do
assert String.reverse("") == ""
assert String.reverse("abc") == "cba"
assert String.reverse("Hello World") == "dlroW olleH"
assert String.reverse("Hello ∂og") == "go∂ olleH"
assert String.reverse("Ā̀stute") == "etutsĀ̀"
assert String.reverse(String.reverse("Hello World")) == "Hello World"
assert String.reverse(String.reverse("Hello \r\n World")) == "Hello \r\n World"
end
describe "replace/3" do
test "with empty string and string replacement" do
assert String.replace("elixir", "", "") == "elixir"
assert String.replace("ELIXIR", "", ".") == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", ".", global: true) == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", ".", global: false) == ".ELIXIR"
assert_raise ArgumentError, fn ->
String.replace("elixir", [""], "")
end
end
test "with empty pattern list" do
assert String.replace("elixir", [], "anything") == "elixir"
end
test "with match pattern and string replacement" do
assert String.replace("a,b,c", ",", "-") == "a-b-c"
assert String.replace("a,b,c", [",", "b"], "-") == "a---c"
assert String.replace("a,b,c", ",", "-", global: false) == "a-b,c"
assert String.replace("a,b,c", [",", "b"], "-", global: false) == "a-b,c"
assert String.replace("ãéã", "é", "e", global: false) == "ãeã"
end
test "with regex and string replacement" do
assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1") == "a,bb,cc"
assert String.replace("a,b,c", ~r/,(.)/, ",\\1\\1", global: false) == "a,bb,c"
end
test "with empty string and function replacement" do
assert String.replace("elixir", "", fn "" -> "" end) == "elixir"
assert String.replace("ELIXIR", "", fn "" -> "." end) == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", fn "" -> "." end, global: true) == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", fn "" -> "." end, global: false) == ".ELIXIR"
assert String.replace("elixir", "", fn "" -> [""] end) == "elixir"
assert String.replace("ELIXIR", "", fn "" -> ["."] end) == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", fn "" -> ["."] end, global: true) == ".E.L.I.X.I.R."
assert String.replace("ELIXIR", "", fn "" -> ["."] end, global: false) == ".ELIXIR"
end
test "with match pattern and function replacement" do
assert String.replace("a,b,c", ",", fn "," -> "-" end) == "a-b-c"
assert String.replace("a,b,c", [",", "b"], fn x -> "[#{x}]" end) == "a[,][b][,]c"
assert String.replace("a,b,c", [",", "b"], fn x -> [?[, x, ?]] end) == "a[,][b][,]c"
assert String.replace("a,b,c", ",", fn "," -> "-" end, global: false) == "a-b,c"
assert String.replace("a,b,c", [",", "b"], fn x -> "[#{x}]" end, global: false) == "a[,]b,c"
assert String.replace("ãéã", "é", fn "é" -> "e" end, global: false) == "ãeã"
end
test "with regex and function replacement" do
assert String.replace("a,b,c", ~r/,(.)/, fn x -> "#{x}#{x}" end) == "a,b,b,c,c"
assert String.replace("a,b,c", ~r/,(.)/, fn x -> [x, x] end) == "a,b,b,c,c"
assert String.replace("a,b,c", ~r/,(.)/, fn x -> "#{x}#{x}" end, global: false) == "a,b,b,c"
assert String.replace("a,b,c", ~r/,(.)/, fn x -> [x, x] end, global: false) == "a,b,b,c"
end
end
describe "replace/4" do
test "with incorrect params" do
assert_raise FunctionClauseError, "no function clause matching in String.replace/4", fn ->
String.replace("a,b,c", "a,b,c", ",", "")
end
end
end
test "duplicate/2" do
assert String.duplicate("abc", 0) == ""
assert String.duplicate("abc", 1) == "abc"
assert String.duplicate("abc", 2) == "abcabc"
assert String.duplicate("&ã$", 2) == "&ã$&ã$"
assert_raise ArgumentError, fn ->
String.duplicate("abc", -1)
end
end
test "codepoints/1" do
assert String.codepoints("elixir") == ["e", "l", "i", "x", "i", "r"]
# slovak
assert String.codepoints("elixír") == ["e", "l", "i", "x", "í", "r"]
# armenian
assert String.codepoints("ոգելից ըմպելիք") ==
["ո", "գ", "ե", "լ", "ի", "ց", " ", "ը", "մ", "պ", "ե", "լ", "ի", "ք"]
# belarussian
assert String.codepoints("эліксір") == ["э", "л", "і", "к", "с", "і", "р"]
# greek
assert String.codepoints("ελιξήριο") == ["ε", "λ", "ι", "ξ", "ή", "ρ", "ι", "ο"]
# hebraic
assert String.codepoints("סם חיים") == ["ס", "ם", " ", "ח", "י", "י", "ם"]
# hindi
assert String.codepoints("अमृत") == ["अ", "म", "ृ", "त"]
# bengali
assert String.codepoints("স্পর্শমণি") == ["স", "্", "প", "র", "্", "শ", "ম", "ণ", "ি"]
# gujarati
assert String.codepoints("સર્વશ્રેષ્ઠ ઇલાજ") ==
["સ", "ર", "્", "વ", "શ", "્", "ર", "ે", "ષ", "્", "ઠ", " ", "ઇ", "લ", "ા", "જ"]
# japanese
assert String.codepoints("世界中の一番") == ["世", "界", "中", "の", "一", "番"]
assert String.codepoints("がガちゃ") == ["が", "ガ", "ち", "ゃ"]
assert String.codepoints("") == []
assert String.codepoints("ϖͲϥЫݎߟΈټϘለДШव׆ש؇؊صلټܗݎޥޘ߉ऌ૫ሏᶆ℆ℙℱ ⅚Ⅷ↠∈⌘①ffi") ==
["ϖ", "Ͳ", "ϥ", "Ы", "ݎ", "ߟ", "Έ"] ++
["ټ", "Ϙ", "ለ", "Д", "Ш", "व"] ++
["׆", "ש", "؇", "؊", "ص", "ل", "ټ"] ++
["ܗ", "ݎ", "ޥ", "ޘ", "߉", "ऌ", "૫"] ++
["ሏ", "ᶆ", "℆", "ℙ", "ℱ", " ", "⅚"] ++ ["Ⅷ", "↠", "∈", "⌘", "①", "ffi"]
end
test "equivalent?/2" do
assert String.equivalent?("", "")
assert String.equivalent?("elixir", "elixir")
assert String.equivalent?("뢴", "뢴")
assert String.equivalent?("ṩ", "ṩ")
refute String.equivalent?("ELIXIR", "elixir")
refute String.equivalent?("døge", "dóge")
end
test "graphemes/1" do
# Extended
assert String.graphemes("Ā̀stute") == ["Ā̀", "s", "t", "u", "t", "e"]
# CLRF
assert String.graphemes("\r\n\f") == ["\r\n", "\f"]
# Regional indicator
assert String.graphemes("\u{1F1E6}\u{1F1E7}") == ["\u{1F1E6}\u{1F1E7}"]
assert String.graphemes("\u{1F1E6}\u{1F1E7}\u{1F1E8}") == ["\u{1F1E6}\u{1F1E7}", "\u{1F1E8}"]
# Hangul
assert String.graphemes("\u1100\u115D\uB4A4") == ["ᄀᅝ뒤"]
# Special Marking with Extended
assert String.graphemes("a\u0300\u0903") == ["a\u0300\u0903"]
end
test "next_grapheme/1" do
assert String.next_grapheme("Ā̀stute") == {"Ā̀", "stute"}
assert String.next_grapheme("") == nil
end
test "first/1" do
assert String.first("elixir") == "e"
assert String.first("íelixr") == "í"
assert String.first("եոգլից ըմպելիք") == "ե"
assert String.first("лэіксір") == "л"
assert String.first("ελιξήριο") == "ε"
assert String.first("סם חיים") == "ס"
assert String.first("がガちゃ") == "が"
assert String.first("Ā̀stute") == "Ā̀"
assert String.first("") == nil
end
test "last/1" do
assert String.last("elixir") == "r"
assert String.last("elixrí") == "í"
assert String.last("եոգլից ըմպելիքե") == "ե"
assert String.last("ліксірэ") == "э"
assert String.last("ειξήριολ") == "λ"
assert String.last("סם ייםח") == "ח"
assert String.last("がガちゃ") == "ゃ"
assert String.last("Ā̀") == "Ā̀"
assert String.last("") == nil
end
test "length/1" do
assert String.length("elixir") == 6
assert String.length("elixrí") == 6
assert String.length("եոգլից") == 6
assert String.length("ліксрэ") == 6
assert String.length("ειξήριολ") == 8
assert String.length("סם ייםח") == 7
assert String.length("がガちゃ") == 4
assert String.length("Ā̀stute") == 6
assert String.length("👨👩👧👦") == 1
assert String.length("") == 0
end
test "at/2" do
assert String.at("л", 0) == "л"
assert String.at("elixir", 1) == "l"
assert String.at("がガちゃ", 2) == "ち"
assert String.at("л", 10) == nil
assert String.at("elixir", -1) == "r"
assert String.at("がガちゃ", -2) == "ち"
assert String.at("л", -3) == nil
assert String.at("Ā̀stute", 1) == "s"
assert String.at("elixir", 6) == nil
assert_raise FunctionClauseError, fn ->
String.at("elixir", 0.1)
end
assert_raise FunctionClauseError, fn ->
String.at("elixir", -0.1)
end
end
test "slice/3" do
assert String.slice("elixir", 1, 3) == "lix"
assert String.slice("あいうえお", 2, 2) == "うえ"
assert String.slice("ειξήριολ", 2, 3) == "ξήρ"
assert String.slice("elixir", 3, 4) == "xir"
assert String.slice("あいうえお", 3, 5) == "えお"
assert String.slice("ειξήριολ", 5, 4) == "ιολ"
assert String.slice("elixir", -3, 2) == "xi"
assert String.slice("あいうえお", -4, 3) == "いうえ"
assert String.slice("ειξήριολ", -5, 3) == "ήρι"
assert String.slice("elixir", -10, 1) == "e"
assert String.slice("あいうえお", -10, 2) == "あい"
assert String.slice("ειξήριολ", -10, 3) == "ειξ"
assert String.slice("elixir", 8, 2) == ""
assert String.slice("あいうえお", 6, 2) == ""
assert String.slice("ειξήριολ", 8, 1) == ""
assert String.slice("ειξήριολ", 9, 1) == ""
assert String.slice("elixir", 0, 0) == ""
assert String.slice("elixir", 5, 0) == ""
assert String.slice("elixir", -5, 0) == ""
assert String.slice("elixir", -10, 10) == "elixir"
assert String.slice("", 0, 1) == ""
assert String.slice("", 1, 1) == ""
end
test "slice/2" do
assert String.slice("elixir", 0..-2) == "elixi"
assert String.slice("elixir", 1..3) == "lix"
assert String.slice("elixir", -5..-3) == "lix"
assert String.slice("elixir", -5..3) == "lix"
assert String.slice("elixir", -10..10) == "elixir"
assert String.slice("あいうえお", 2..3) == "うえ"
assert String.slice("ειξήριολ", 2..4) == "ξήρ"
assert String.slice("elixir", 3..6) == "xir"
assert String.slice("あいうえお", 3..7) == "えお"
assert String.slice("ειξήριολ", 5..8) == "ιολ"
assert String.slice("elixir", -3..-2) == "xi"
assert String.slice("あいうえお", -4..-2) == "いうえ"
assert String.slice("ειξήριολ", -5..-3) == "ήρι"
assert String.slice("elixir", 8..9) == ""
assert String.slice("あいうえお", 6..7) == ""
assert String.slice("ειξήριολ", 8..8) == ""
assert String.slice("ειξήριολ", 9..9) == ""
assert String.slice("", 0..0) == ""
assert String.slice("", 1..1) == ""
assert String.slice("あいうえお", -2..-4) == ""
assert String.slice("あいうえお", -10..-15) == ""
assert String.slice("hello あいうえお Unicode", 8..-1) == "うえお Unicode"
assert String.slice("abc", -1..14) == "c"
assert String.slice("a·̀ͯ‿.⁀:", 0..-2) == "a·̀ͯ‿.⁀"
assert_raise FunctionClauseError, fn ->
String.slice(nil, 0..1)
end
end
test "slice/2 with steps" do
assert String.slice("elixir", 0..-2//2) == "eii"
assert String.slice("elixir", 1..3//2) == "lx"
assert String.slice("elixir", -5..-3//2) == "lx"
assert String.slice("elixir", -5..3//2) == "lx"
assert String.slice("あいうえお", 2..3//2) == "う"
assert String.slice("ειξήριολ", 2..4//2) == "ξρ"
assert String.slice("elixir", 3..6//2) == "xr"
assert String.slice("あいうえお", 3..7//2) == "え"
assert String.slice("ειξήριολ", 5..8//2) == "ιλ"
assert String.slice("elixir", -3..-2//2) == "x"
assert String.slice("あいうえお", -4..-2//2) == "いえ"
assert String.slice("ειξήριολ", -5..-3//2) == "ήι"
assert String.slice("elixir", 8..9//2) == ""
assert String.slice("", 0..0//2) == ""
assert String.slice("", 1..1//2) == ""
assert String.slice("あいうえお", -2..-4//2) == ""
assert String.slice("あいうえお", -10..-15//2) == ""
assert String.slice("hello あいうえお Unicode", 8..-1//2) == "うおUioe"
assert String.slice("abc", -1..14//2) == "c"
assert String.slice("a·̀ͯ‿.⁀:", 0..-2//2) == "a‿⁀"
end
test "valid?/1" do
assert String.valid?("afds")
assert String.valid?("øsdfh")
assert String.valid?("dskfjあska")
assert String.valid?(<<0xEF, 0xB7, 0x90>>)
refute String.valid?(<<0xFFFF::16>>)
refute String.valid?("asd" <> <<0xFFFF::16>>)
end
test "chunk/2 with :valid trait" do
assert String.chunk("", :valid) == []
assert String.chunk("ødskfjあ\x11ska", :valid) == ["ødskfjあ\x11ska"]
end
test "chunk/2 with :printable trait" do
assert String.chunk("", :printable) == []
assert String.chunk("ødskfjあska", :printable) == ["ødskfjあska"]
assert String.chunk("abc\u{0FFFF}def", :printable) == ["abc", <<0x0FFFF::utf8>>, "def"]
assert String.chunk("\x06ab\x05cdef\x03\0", :printable) ==
[<<6>>, "ab", <<5>>, "cdef", <<3, 0>>]
end
test "starts_with?/2" do
assert String.starts_with?("hello", "he")
assert String.starts_with?("hello", "hello")
refute String.starts_with?("hello", [])
assert String.starts_with?("hello", "")
assert String.starts_with?("hello", [""])
assert String.starts_with?("hello", ["hellö", "hell"])
assert String.starts_with?("エリクシア", "エリ")
refute String.starts_with?("hello", "lo")
refute String.starts_with?("hello", "hellö")
refute String.starts_with?("hello", ["hellö", "goodbye"])
refute String.starts_with?("エリクシア", "仙丹")
end
test "ends_with?/2" do
assert String.ends_with?("hello", "lo")
assert String.ends_with?("hello", "hello")
refute String.ends_with?("hello", [])
assert String.ends_with?("hello", ["hell", "lo", "xx"])
assert String.ends_with?("hello", ["hellö", "lo"])
assert String.ends_with?("エリクシア", "シア")
refute String.ends_with?("hello", "he")
refute String.ends_with?("hello", "hellö")
refute String.ends_with?("hello", ["hel", "goodbye"])
refute String.ends_with?("エリクシア", "仙丹")
end
test "contains?/2" do
assert String.contains?("elixir of life", "of")
assert String.contains?("エリクシア", "シ")
refute String.contains?("elixir of life", [])
assert String.contains?("elixir of life", "")
assert String.contains?("elixir of life", [""])
assert String.contains?("elixir of life", ["mercury", "life"])
refute String.contains?("elixir of life", "death")
refute String.contains?("エリクシア", "仙")
refute String.contains?("elixir of life", ["death", "mercury", "eternal life"])
end
test "to_charlist/1" do
assert String.to_charlist("æß") == [?æ, ?ß]
assert String.to_charlist("abc") == [?a, ?b, ?c]
assert_raise UnicodeConversionError, "invalid encoding starting at <<223, 255>>", fn ->
String.to_charlist(<<0xDF, 0xFF>>)
end
assert_raise UnicodeConversionError, "incomplete encoding starting at <<195>>", fn ->
String.to_charlist(<<106, 111, 115, 195>>)
end
end
test "to_float/1" do
  assert String.to_float("3.0") == 3.0

  # "3" has no fractional part, so to_float/1 raises. The value is wrapped in
  # a fun so the compiler sees a runtime value rather than a literal —
  # presumably to keep the call from being evaluated/flagged at compile
  # time (TODO confirm).
  three = fn -> "3" end
  assert_raise ArgumentError, fn -> String.to_float(three.()) end
end
test "jaro_distance/2" do
  # Boundary cases: identical strings score 1.0, an empty side scores 0.0.
  assert String.jaro_distance("same", "same") == 1.0
  assert String.jaro_distance("any", "") == 0.0
  assert String.jaro_distance("", "any") == 0.0

  # Fixture pairs pinned to their exact expected similarity scores.
  assert String.jaro_distance("martha", "marhta") == 0.9444444444444445
  assert String.jaro_distance("martha", "marhha") == 0.888888888888889
  assert String.jaro_distance("marhha", "martha") == 0.888888888888889
  assert String.jaro_distance("dwayne", "duane") == 0.8222222222222223
  assert String.jaro_distance("dixon", "dicksonx") == 0.7666666666666666
  assert String.jaro_distance("xdicksonx", "dixon") == 0.7851851851851852
  assert String.jaro_distance("shackleford", "shackelford") == 0.9696969696969697
  assert String.jaro_distance("dunningham", "cunnigham") == 0.8962962962962964
  assert String.jaro_distance("nichleson", "nichulson") == 0.9259259259259259
  assert String.jaro_distance("jones", "johnson") == 0.7904761904761904
  assert String.jaro_distance("massey", "massie") == 0.888888888888889
  assert String.jaro_distance("abroms", "abrams") == 0.888888888888889
  assert String.jaro_distance("hardin", "martinez") == 0.7222222222222222
  assert String.jaro_distance("itman", "smith") == 0.4666666666666666
  assert String.jaro_distance("jeraldine", "geraldine") == 0.9259259259259259
  assert String.jaro_distance("michelle", "michael") == 0.8690476190476191
  assert String.jaro_distance("julies", "julius") == 0.888888888888889
  assert String.jaro_distance("tanya", "tonya") == 0.8666666666666667
  assert String.jaro_distance("sean", "susan") == 0.7833333333333333
  assert String.jaro_distance("jon", "john") == 0.9166666666666666
  assert String.jaro_distance("jon", "jan") == 0.7777777777777777

  # Multibyte (Cyrillic) input is compared by grapheme, not by byte.
  assert String.jaro_distance("семена", "стремя") == 0.6666666666666666
end
test "myers_difference/2" do
  # Degenerate inputs: pure insert, pure delete, nothing at all.
  assert String.myers_difference("", "abc") == [ins: "abc"]
  assert String.myers_difference("abc", "") == [del: "abc"]
  assert String.myers_difference("", "") == []

  # Identical strings collapse to a single :eq chunk.
  assert String.myers_difference("abc", "abc") == [eq: "abc"]

  # Multibyte graphemes are inserted/deleted as whole units.
  assert String.myers_difference("abc", "aйbc") == [eq: "a", ins: "й", eq: "bc"]
  assert String.myers_difference("aйbc", "abc") == [eq: "a", del: "й", eq: "bc"]
end
test "normalize/2" do
  # Already-normalized input comes back unchanged for the requested form.
  assert String.normalize("ŝ", :nfd) == "ŝ"
  assert String.normalize("ḇravô", :nfd) == "ḇravô"
  assert String.normalize("ṩierra", :nfd) == "ṩierra"
  assert String.normalize("뢴", :nfd) == "뢴"
  assert String.normalize("êchǭ", :nfc) == "êchǭ"
  assert String.normalize("거̄", :nfc) == "거̄"
  assert String.normalize("뢴", :nfc) == "뢴"

  ## Error cases: invalid UTF-8 input is returned unchanged by every form
  assert String.normalize(<<15, 216>>, :nfc) == <<15, 216>>
  assert String.normalize(<<15, 216>>, :nfd) == <<15, 216>>
  assert String.normalize(<<216, 15>>, :nfc) == <<216, 15>>
  assert String.normalize(<<216, 15>>, :nfd) == <<216, 15>>
  assert String.normalize(<<15, 216>>, :nfkc) == <<15, 216>>
  assert String.normalize(<<15, 216>>, :nfkd) == <<15, 216>>
  assert String.normalize(<<216, 15>>, :nfkc) == <<216, 15>>
  assert String.normalize(<<216, 15>>, :nfkd) == <<216, 15>>

  ## Cases from NormalizationTest.txt
  ## (several cases below previously appeared two or three times verbatim;
  ## the exact duplicates were removed — the remaining set asserts the same
  ## behavior)

  # 05B8 05B9 05B1 0591 05C3 05B0 05AC 059F
  # 05B1 05B8 05B9 0591 05C3 05B0 05AC 059F
  # HEBREW POINT QAMATS, HEBREW POINT HOLAM, HEBREW POINT HATAF SEGOL,
  # HEBREW ACCENT ETNAHTA, HEBREW PUNCTUATION SOF PASUQ, HEBREW POINT SHEVA,
  # HEBREW ACCENT ILUY, HEBREW ACCENT QARNEY PARA
  assert String.normalize("ֱָֹ֑׃ְ֬֟", :nfc) == "ֱָֹ֑׃ְ֬֟"

  # 095D (exclusion list)
  # 0922 093C
  # DEVANAGARI LETTER RHA
  assert String.normalize("ढ़", :nfc) == "ढ़"

  # 0061 0315 0300 05AE 0340 0062
  # 00E0 05AE 0300 0315 0062
  # LATIN SMALL LETTER A, COMBINING COMMA ABOVE RIGHT, COMBINING GRAVE ACCENT,
  # HEBREW ACCENT ZINOR, COMBINING GRAVE TONE MARK, LATIN SMALL LETTER B
  assert String.normalize("à֮̀̕b", :nfc) == "à֮̀̕b"

  # 0344
  # 0308 0301
  # COMBINING GREEK DIALYTIKA TONOS
  assert String.normalize("\u0344", :nfc) == "\u0308\u0301"

  # 115B9 0334 115AF
  # 115B9 0334 115AF
  # SIDDHAM VOWEL SIGN AI, COMBINING TILDE OVERLAY, SIDDHAM VOWEL SIGN AA
  assert String.normalize("𑖹̴𑖯", :nfc) == "𑖹̴𑖯"

  # (ff; ff; ff; ff; ff; ) LATIN SMALL LIGATURE FF
  # FB00;FB00;FB00;0066 0066;0066 0066;
  assert String.normalize("ff", :nfkd) == "\u0066\u0066"

  # (fl; fl; fl; fl; fl; ) LATIN SMALL LIGATURE FL
  # FB02;FB02;FB02;0066 006C;0066 006C;
  assert String.normalize("fl", :nfkd) == "\u0066\u006C"

  # (ſt; ſt; ſt; st; st; ) LATIN SMALL LIGATURE LONG S T
  # FB05;FB05;FB05;0073 0074;0073 0074;
  assert String.normalize("ſt", :nfkd) == "\u0073\u0074"

  # (st; st; st; st; st; ) LATIN SMALL LIGATURE ST
  # FB06;FB06;FB06;0073 0074;0073 0074;
  assert String.normalize("\u0073\u0074", :nfkc) == "\u0073\u0074"

  # (ﬓ; ﬓ; ﬓ; մն; մն; ) ARMENIAN SMALL LIGATURE MEN NOW
  # FB13;FB13;FB13;0574 0576;0574 0576;
  assert String.normalize("\u0574\u0576", :nfkc) == "\u0574\u0576"
end
# Carriage return can be a grapheme cluster if followed by
# newline so we test some corner cases here.
test "carriage return" do
  # None of these strings contain "\r\n", so each control character is a
  # grapheme of its own.
  assert String.at("\r\t\v", 0) == "\r"
  assert String.at("\r\t\v", 1) == "\t"
  assert String.at("\r\t\v", 2) == "\v"

  # \xFF is not valid UTF-8; these cases check the grapheme walkers still
  # step past an invalid byte (it counts as one position).
  assert String.at("\xFF\r\t\v", 1) == "\r"
  assert String.at("\r\xFF\t\v", 2) == "\t"
  assert String.at("\r\t\xFF\v", 3) == "\v"

  assert String.last("\r\t\v") == "\v"
  assert String.last("\r\xFF\t\xFF\v") == "\v"

  assert String.next_grapheme("\r\t\v") == {"\r", "\t\v"}
  assert String.next_grapheme("\t\v") == {"\t", "\v"}
  assert String.next_grapheme("\v") == {"\v", ""}

  # Invalid bytes contribute 1 to the grapheme length.
  assert String.length("\r\t\v") == 3
  assert String.length("\r\xFF\t\v") == 4
  assert String.length("\r\t\xFF\v") == 4

  assert String.bag_distance("\r\t\xFF\v", "\xFF\r\n\xFF") == 0.25
  assert String.split("\r\t\v", "") == ["", "\r", "\t", "\v", ""]
end
end
| 40.179671 | 98 | 0.567523 |
ff4386a1d8a3e64304537f852af101e4e4dc42fa | 3,699 | ex | Elixir | lib/mongo/server_description.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | lib/mongo/server_description.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | lib/mongo/server_description.ex | rafamedina/mongodb | 65bd68adbc2753c38d927ad0ba804fe8e66d50d7 | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.ServerDescription do
@moduledoc false
# see https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring.rst#serverdescription
@type type :: :standalone | :mongos | :possible_primary | :rs_primary |
:rs_secondary | :rs_arbiter | :rs_other | :rs_ghost | :unknown
@type t :: %{
address: String.t | nil,
error: String.t | nil,
round_trip_time: non_neg_integer | nil,
last_write_date: BSON.DateTime.t,
op_time: BSON.ObjectId.t | nil,
type: type,
min_wire_version: non_neg_integer,
max_wire_version: non_neg_integer,
me: String.t | nil,
hosts: [String.t],
passives: [String.t],
arbiters: [String.t],
tag_set: %{String.t => String.t},
set_name: String.t | nil,
set_version: non_neg_integer | nil,
election_id: BSON.ObjectId.t | nil,
primary: String.t | nil,
last_update_time: non_neg_integer
}
# Returns a server-description map with every field at its initial value;
# entries in `overrides` replace the corresponding defaults.
def defaults(overrides \\ %{}) do
  base = %{
    address: "localhost:27017",
    error: nil,
    round_trip_time: nil,
    last_write_date: nil,
    op_time: nil,
    type: :unknown,
    min_wire_version: 0,
    max_wire_version: 0,
    me: nil,
    hosts: [],
    passives: [],
    arbiters: [],
    tag_set: %{},
    set_name: nil,
    set_version: nil,
    election_id: nil,
    primary: nil,
    last_update_time: 0
  }

  Map.merge(base, overrides)
end
# Builds a fresh description for a server whose isMaster call failed:
# only the address is kept, the error is recorded, everything else resets.
def from_is_master_error(last_server_description, error) do
  %{address: address} = last_server_description
  defaults(%{address: address, error: error})
end
# see https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring.rst#parsing-an-ismaster-response
# Builds a server description from a successful isMaster reply.
# `rtt` is the measured round trip of this call; `finish_time` is when the
# reply was received.
def from_is_master(last_description, rtt, finish_time, is_master_reply) do
  last_rtt = last_description.round_trip_time || rtt

  defaults(%{
    address: last_description.address,
    # Exponentially weighted moving average: 20% new sample, 80% history.
    round_trip_time: round(0.2 * rtt + 0.8 * last_rtt),
    type: determine_server_type(is_master_reply),
    last_write_date: get_in(is_master_reply,
      ["lastWrite", "lastWriteDate"]),
    op_time: get_in(is_master_reply, ["lastWrite", "opTime"]),
    last_update_time: finish_time,
    min_wire_version: is_master_reply["minWireVersion"] || 0,
    max_wire_version: is_master_reply["maxWireVersion"] || 0,
    me: is_master_reply["me"],
    hosts: downcased_hosts(is_master_reply["hosts"]),
    passives: downcased_hosts(is_master_reply["passives"]),
    arbiters: downcased_hosts(is_master_reply["arbiters"]),
    tag_set: is_master_reply["tags"] || %{},
    set_name: is_master_reply["setName"],
    set_version: is_master_reply["setVersion"],
    election_id: is_master_reply["electionId"],
    primary: is_master_reply["primary"]
  })
end

# Host lists may be absent from the reply; stored downcased so later
# comparisons are case-insensitive.
defp downcased_hosts(nil), do: []
defp downcased_hosts(hosts), do: Enum.map(hosts, &String.downcase/1)
# see https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring.rst#type
# Clause order is significant: the checks mirror the precedence in the
# server-discovery spec linked above.
# "ok" != 1 means the handshake itself failed — type is unknown.
defp determine_server_type(%{"ok" => n}) when n != 1, do: :unknown
# mongos identifies itself with msg == "isdbgrid".
defp determine_server_type(%{"msg" => "isdbgrid"}), do: :mongos
# A replica-set member that hasn't been initialized into a set yet.
defp determine_server_type(%{"isreplicaset" => true}), do: :rs_ghost
# Any reply carrying a set name is a replica-set member; the flags decide
# the exact role, falling back to :rs_other.
defp determine_server_type(%{"setName" => set_name} = is_master_reply) when set_name != nil do
  case is_master_reply do
    %{"ismaster" => true} ->
      :rs_primary
    %{"secondary" => true} ->
      :rs_secondary
    %{"arbiterOnly" => true} ->
      :rs_arbiter
    _ ->
      :rs_other
  end
end
# No set name, no mongos marker: a standalone server.
defp determine_server_type(_), do: :standalone
end
| 35.567308 | 165 | 0.650176 |
ff43911b93dc842a437e725014f833b9c86c1a03 | 1,512 | exs | Elixir | test/users/events_test.exs | jgchristopher/tentacat | 47cec7d3d4330a8d42067d6ab38b362c92997467 | [
"MIT"
] | null | null | null | test/users/events_test.exs | jgchristopher/tentacat | 47cec7d3d4330a8d42067d6ab38b362c92997467 | [
"MIT"
] | null | null | null | test/users/events_test.exs | jgchristopher/tentacat | 47cec7d3d4330a8d42067d6ab38b362c92997467 | [
"MIT"
] | null | null | null | defmodule Tentacat.Users.EventsTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney

import Tentacat.Users.Events

doctest Tentacat.Users.Events

# Shared client (no explicit credentials passed); the recorded cassettes
# supply every HTTP response.
@client Tentacat.Client.new()

setup_all do
  # ExVCR replays recorded responses, but the HTTP stack still must be up.
  HTTPoison.start()
end

test "list/2" do
  use_cassette "users/events#list" do
    assert elem(list(@client, "soudqwiggle"), 1) == []
  end
end

test "list/3" do
  use_cassette "users/events#list_e_tag" do
    # Passing an ETag yields 304 Not Modified when nothing changed.
    assert {304, _, _} =
             list(
               @client,
               "soudqwiggle",
               "1e1eaa79cefda6e152f880f49c5bd0c6a4bf662c957cdfe7c2923d316c4db966"
             )
  end
end

test "list_public/2" do
  use_cassette "users/events#list_public" do
    assert elem(list_public(@client, "soudqwiggle"), 1) == []
  end
end

test "list_public/3" do
  use_cassette "users/events#list_public_e_tag" do
    assert {304, _, _} =
             list_public(
               @client,
               "soudqwiggle",
               "1e1eaa79cefda6e152f880f49c5bd0c6a4bf662c957cdfe7c2923d316c4db966"
             )
  end
end

test "list_user_org/3" do
  use_cassette "users/events#list_user_org" do
    # The recorded response for this org/user pair is a 404.
    assert {404, _, _} = list_user_org(@client, "duksis", "honeypotio")
  end
end

test "list_received_public/2" do
  use_cassette "users/events#list_received_public", match_requests_on: [:query] do
    assert elem(list_received_public(@client, "duksis"), 1) == []
  end
end
end
| 25.2 | 84 | 0.626323 |
ff43c74bb752006cc297bb5dfb3404ac9ae03877 | 187 | exs | Elixir | test/controllers/page_controller_test.exs | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | [
"MIT"
] | null | null | null | test/controllers/page_controller_test.exs | xelcer/phoenix_demo | 84dc516441dd715db7e24b1384c2fea3adffe0f7 | [
"MIT"
] | null | null | null | defmodule Demo.PageControllerTest do
use Demo.ConnCase
test "GET /", %{conn: conn} do
  # Smoke test: the root route responds 200 and renders the default
  # Phoenix landing-page copy.
  conn = get conn, "/"
  assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 20.777778 | 60 | 0.663102 |
ff43cbdf098a40aeabbd6bff294061e06a276362 | 290 | ex | Elixir | widget_market_phoenix/lib/widget_market_phoenix/repo.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | widget_market_phoenix/lib/widget_market_phoenix/repo.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | widget_market_phoenix/lib/widget_market_phoenix/repo.ex | thegillis/from_rails_to_phoenix | fb230b787fd441e71e93dc8d82b3769eeaeddbf8 | [
"MIT"
] | null | null | null | defmodule WidgetMarketPhoenix.Repo do
use Ecto.Repo, otp_app: :widget_market_phoenix
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
  # Read DATABASE_URL at runtime so the value isn't frozen at compile time.
  database_url = System.get_env("DATABASE_URL")
  {:ok, Keyword.put(opts, :url, database_url)}
end
end
| 24.166667 | 66 | 0.724138 |
ff43d6b915c75a9d4fe447e28dae81242a6e1e45 | 5,560 | ex | Elixir | lib/sanbase_web/graphql/plugs/request_halt_plug.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 1 | 2022-01-30T19:51:39.000Z | 2022-01-30T19:51:39.000Z | lib/sanbase_web/graphql/plugs/request_halt_plug.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | lib/sanbase_web/graphql/plugs/request_halt_plug.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | null | null | null | defmodule SanbaseWeb.Graphql.RequestHaltPlug do
@moduledoc ~s"""
Plug that halts requests if some conditions are met
It performs the following checks:
- Check if the request comes from SanSheets and if the user
has access to it
- Check if the rate limits are exceeded
"""
@behaviour Plug
@compile {:inline,
should_halt?: 3,
halt_sansheets_request?: 2,
halt_api_call_limit_reached?: 2,
build_error_msg: 1}
import Plug.Conn
alias Sanbase.ApiCallLimit
alias SanbaseWeb.Graphql.RequestHaltPlug
require Logger
@product_id_api Sanbase.Billing.Product.product_api()
@should_halt_methods [
&RequestHaltPlug.halt_sansheets_request?/2,
&RequestHaltPlug.halt_api_call_limit_reached?/2
]
def init(opts), do: opts

# Runs every registered halt check against the request. The first check that
# returns `{true, conn, error_map}` short-circuits the request with a JSON
# error response; otherwise the conn passes through untouched.
def call(conn, _) do
  # Absinthe stores the GraphQL context under conn.private.
  context = conn.private[:absinthe][:context]

  case should_halt?(conn, context, @should_halt_methods) do
    {false, conn} ->
      conn

    {true, conn, error_map} ->
      %{error_msg: error_msg, error_code: error_code} = error_map

      conn
      |> put_resp_content_type("application/json", "charset=utf-8")
      |> send_resp(error_code, build_error_msg(error_msg))
      |> halt()
  end
end
# Walks the halt checks left to right, stopping at the first one that halts.
defp should_halt?(conn, _context, []), do: {false, conn}

defp should_halt?(conn, context, [check | remaining]) do
  case check.(conn, context) do
    # The check may attach response headers, so always carry its conn forward.
    {false, conn} -> should_halt?(conn, context, remaining)
    {true, conn, error_map} -> {true, conn, error_map}
  end
end
# Halts SanSheets requests (identified by the Google-Apps-Script user agent)
# coming from FREE subscriptions with a 401; paid plans pass through.
def halt_sansheets_request?(conn, %{auth: %{subscription: %{plan: %{name: plan_name}}}}) do
  if is_sansheets_request(conn) and plan_name == "FREE" do
    error_map = %{
      error_msg: """
      You need to upgrade Sanbase Pro in order to use SanSheets.
      If you already have Sanbase Pro, please make sure that a correct API key is provided.
      """,
      error_code: 401
    }

    {true, conn, error_map}
  else
    {false, conn}
  end
end

# Contexts without a subscription plan are never halted by this check.
def halt_sansheets_request?(conn, _context), do: {false, conn}
# Authenticated API requests: the quota is tracked per user.
def halt_api_call_limit_reached?(conn, %{
      rate_limiting_enabled: true,
      product_id: @product_id_api,
      auth: %{current_user: user, auth_method: auth_method}
    }) do
  case ApiCallLimit.get_quota(:user, user, auth_method) do
    {:error, %{blocked_for_seconds: _} = rate_limit_map} ->
      # Over the limit: expose the rate-limit headers and halt with 429.
      conn =
        Sanbase.Utils.Conn.put_extra_resp_headers(
          conn,
          rate_limit_headers(rate_limit_map)
        )

      {true, conn, rate_limit_map_to_error_map(rate_limit_map)}

    {:ok, %{quota: :infinity}} ->
      # Unlimited quota: pass through without any rate-limit headers.
      {false, conn}

    {:ok, %{quota: _} = quota_map} ->
      # Within quota: pass through but still advertise the remaining quota.
      conn =
        Sanbase.Utils.Conn.put_extra_resp_headers(
          conn,
          rate_limit_headers(quota_map)
        )

      {false, conn}
  end
end

# Anonymous (or not-yet-authenticated) API requests: quota per remote IP.
# NOTE(review): this clause mirrors the per-user clause above — keep the two
# in sync when changing either.
def halt_api_call_limit_reached?(
      conn,
      %{
        rate_limiting_enabled: true,
        product_id: @product_id_api,
        remote_ip: remote_ip
      } = context
    ) do
  remote_ip = Sanbase.Utils.IP.ip_tuple_to_string(remote_ip)
  # The auth map may be missing entirely, hence Access-style lookups.
  auth_method = context[:auth][:auth_method] || :unauthorized

  case ApiCallLimit.get_quota(:remote_ip, remote_ip, auth_method) do
    {:error, %{blocked_for_seconds: _} = rate_limit_map} ->
      conn =
        Sanbase.Utils.Conn.put_extra_resp_headers(
          conn,
          rate_limit_headers(rate_limit_map)
        )

      {true, conn, rate_limit_map_to_error_map(rate_limit_map)}

    {:ok, %{quota: :infinity}} ->
      {false, conn}

    {:ok, %{quota: _} = quota_map} ->
      conn =
        Sanbase.Utils.Conn.put_extra_resp_headers(
          conn,
          rate_limit_headers(quota_map)
        )

      {false, conn}
  end
end

# Rate limiting disabled, or a non-API product — never halts.
def halt_api_call_limit_reached?(conn, _context), do: {false, conn}
# Human-facing message telling the caller how long they are blocked for.
defp rate_limit_error_message(%{blocked_for_seconds: seconds}) do
  human_readable = Sanbase.DateTimeUtils.seconds_to_human_readable(seconds)

  """
  API Rate Limit Reached. Try again in #{seconds} seconds (#{human_readable})
  """
end
# Translates a quota/rate-limit map into x-ratelimit-* response headers.
# When the caller is blocked, an x-ratelimit-reset header is prepended.
defp rate_limit_headers(map) do
  %{api_calls_limits: limits, api_calls_remaining: remaining} = map

  base_headers = [
    {"x-ratelimit-remaining-month", remaining.month},
    {"x-ratelimit-remaining-hour", remaining.hour},
    {"x-ratelimit-remaining-minute", remaining.minute},
    {"x-ratelimit-remaining", remaining.minute},
    {"x-ratelimit-limit-month", limits.month},
    {"x-ratelimit-limit-hour", limits.hour},
    {"x-ratelimit-limit-minute", limits.minute},
    {"x-ratelimit-limit", limits.minute}
  ]

  case Map.get(map, :blocked_for_seconds) do
    nil -> base_headers
    blocked_for -> [{"x-ratelimit-reset", blocked_for} | base_headers]
  end
end
# Wraps the message in the standard GraphQL-ish error envelope as JSON.
defp build_error_msg(msg) do
  Jason.encode!(%{errors: %{details: msg}})
end
# Guard: true when an auth header list is effectively absent.
# NOTE(review): this guard appears unused within this module — confirm
# against external callers before removing.
defguard no_auth_header(header) when header in [[], ["null"], [""], nil]
# SanSheets calls arrive from Google Apps Script, which identifies itself
# through the user-agent header.
defp is_sansheets_request(conn) do
  with [agent] <- Plug.Conn.get_req_header(conn, "user-agent") do
    String.contains?(agent, "Google-Apps-Script")
  else
    _ -> false
  end
end
# Shapes a rate-limit result into the {error_msg, error_code} map the plug
# responds with; 429 is "Too Many Requests".
defp rate_limit_map_to_error_map(rate_limit_map) do
  %{error_code: 429, error_msg: rate_limit_error_message(rate_limit_map)}
end
| 27.661692 | 95 | 0.636871 |
ff43e6956e8e43cf0a9f70e39b5ec3f26a40fc98 | 2,068 | exs | Elixir | harbor/test/ports/rumble/game/turn_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/ports/rumble/game/turn_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/ports/rumble/game/turn_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | defmodule PortsTest.Rumble.Game.Turn do
use ExUnit.Case
alias Ports.Rumble.Game
# Creates a joined peer
defp jp(id) do
  %{id: id, is_joined: true}
end

# State stub whose milestone has no current turn yet; `peers` maps id => peer.
defp nil_state(peers) do
  %{milestone: %{current_turn: nil}, peers: peers}
end

# Advances the game one turn via Game.next_turn/1 and stores the result
# back into the milestone.
defp do_next(state) do
  %{state | milestone: %{current_turn: Game.next_turn(state)}}
end
test "validate first turn is first id" do
  peers = %{0 => jp(0), 1 => jp(1)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 0

  # The current turn should be the first id and the peer that is
  # not joined should be filtered out
  peers = %{0 => %{id: 0, is_joined: false}, 1 => jp(1), 2 => jp(2)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 1

  # Validate sorting (ids deliberately inserted out of order)
  peers = %{0 => %{id: 0, is_joined: false}, 4 => jp(4), 2 => jp(2)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 2
end

test "validate continue" do
  peers = %{0 => jp(0), 1 => %{id: 1, is_joined: false}, 2 => jp(2)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 0

  # Skip the first player because they are not in the game
  state = do_next(state)
  assert state.milestone.current_turn == 2
end

test "validate loopback" do
  # After the last joined peer, the turn wraps back to the first one.
  peers = %{0 => jp(0), 1 => jp(1)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 0
  state = do_next(state)
  assert state.milestone.current_turn == 1
  state = do_next(state)
  assert state.milestone.current_turn == 0

  # Ensure this works when the first player is not id 0
  peers = %{0 => %{id: 0, is_joined: false}, 1 => jp(1), 2 => jp(2)}
  state = nil_state(peers)

  state = do_next(state)
  assert state.milestone.current_turn == 1
  state = do_next(state)
  assert state.milestone.current_turn == 2
  state = do_next(state)
  assert state.milestone.current_turn == 1
end
end
| 25.530864 | 70 | 0.63588 |
ff4438522d161701a826ae1140644d84e7b0c30b | 1,880 | ex | Elixir | lib/token.ex | laksamanakeris/ExFirebaseAuth | 16a5de53abfad5b66a4bfe251be532defd6d2512 | [
"MIT"
] | 9 | 2021-01-08T17:11:19.000Z | 2021-08-12T18:23:19.000Z | lib/token.ex | laksamanakeris/ExFirebaseAuth | 16a5de53abfad5b66a4bfe251be532defd6d2512 | [
"MIT"
] | 35 | 2021-01-08T17:44:09.000Z | 2022-03-24T04:11:24.000Z | lib/token.ex | laksamanakeris/ExFirebaseAuth | 16a5de53abfad5b66a4bfe251be532defd6d2512 | [
"MIT"
] | 7 | 2021-04-19T14:19:09.000Z | 2021-12-30T20:24:15.000Z | defmodule ExFirebaseAuth.Token do
# Looks up the key stored under `key_id` in the key-store ETS table;
# returns nil when the id is unknown.
defp get_public_key(key_id) do
  case :ets.lookup(ExFirebaseAuth.KeyStore, key_id) do
    [{^key_id, key}] -> key
    [] -> nil
  end
end
@spec issuer :: String.t()
@doc ~S"""
Returns the issuer configured under `:ex_firebase_auth, :issuer`.

## Examples

    iex> ExFirebaseAuth.Token.issuer()
    "https://securetoken.google.com/project-123abc"
"""
def issuer do
  # Raises if the application env is missing, surfacing misconfiguration early.
  Application.fetch_env!(:ex_firebase_auth, :issuer)
end
@spec verify_token(String.t) ::
        {:error, String.t} | {:ok, String.t, JOSE.JWT.t}
@doc ~S"""
Verifies a token against Google's public keys. Returns {:ok, user_id, claims} if successful. {:error, _} otherwise.

## Examples

    iex> ExFirebaseAuth.Token.verify_token("ey.some.token")
    {:ok, "user id", %{}}

    iex> ExFirebaseAuth.Token.verify_token("ey.some.token")
    {:error, "Invalid JWT header, `kid` missing"}
"""
def verify_token(token_string) do
  issuer = issuer()

  # Each step is tagged so the `else` clauses can report which step failed.
  # Step 1: read the `kid` from the (still unverified) JWT header.
  with {:jwtheader, %{fields: %{"kid" => kid}}} <-
         {:jwtheader, JOSE.JWT.peek_protected(token_string)},
       # read key from store
       {:key, %JOSE.JWK{} = key} <- {:key, get_public_key(kid)},
       # check if verify returns true and issuer matches; the pinned ^issuer
       # makes a wrong issuer fall through to the else clauses.
       {:verify, {true, %{fields: %{"iss" => ^issuer, "sub" => sub}} = data, _}} <-
         {:verify, JOSE.JWT.verify(key, token_string)} do
    {:ok, sub, data}
  else
    {:jwtheader, _} ->
      {:error, "Invalid JWT header, `kid` missing"}

    {:key, _} ->
      {:error, "Public key retrieved from google was not found or could not be parsed"}

    {:verify, {false, _, _}} ->
      {:error, "Invalid signature"}

    # Signature was valid but the issuer (or sub) didn't match the pin above.
    {:verify, {true, _, _}} ->
      {:error, "Signed by invalid issuer"}

    {:verify, _} ->
      {:error, "None of public keys matched auth token's key ids"}
  end
end
| 28.923077 | 116 | 0.580851 |
ff4444c8cc63853a58d1de4b7271b96fe15e73d4 | 902 | ex | Elixir | memorex/lib/memorex/eraser.ex | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | null | null | null | memorex/lib/memorex/eraser.ex | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | null | null | null | memorex/lib/memorex/eraser.ex | at7heb/liveview_elixirconf_2021 | eee64f38ec8a7365e8b728d76cd795a5c23199a9 | [
"MIT"
] | 6 | 2021-10-07T14:50:48.000Z | 2021-10-08T14:50:09.000Z | defmodule Memorex.Eraser do
defstruct ~w[plan phrase]a
# Builds an eraser for `phrase`: every grapheme index is shuffled and split
# into `cycles` batches; each erase/1 call consumes one batch.
def new(phrase, cycles \\ 3) when is_binary(phrase) do
  graphemes = String.graphemes(phrase)
  count = length(graphemes)
  batch_size = ceil(count / cycles)

  plan =
    0..(count - 1)
    |> Enum.shuffle()
    |> Enum.chunk_every(batch_size)

  %__MODULE__{plan: plan, phrase: graphemes}
end
# Consumes the next batch of the plan, replacing the word characters at
# those indices with underscores. Once the plan is empty, the eraser is
# returned unchanged.
def erase(%{plan: [batch | remaining], phrase: phrase}) do
  erased =
    Enum.reduce(batch, phrase, fn index, acc ->
      List.replace_at(acc, index, blank_out(Enum.at(acc, index)))
    end)

  %__MODULE__{plan: remaining, phrase: erased}
end

def erase(eraser), do: eraser

# Word characters become underscores; punctuation/whitespace stay visible.
defp blank_out(char) do
  if String.match?(char, ~r/\w/i), do: "_", else: char
end
# Joins the grapheme list back into a printable string.
def as_string(%{phrase: graphemes}), do: Enum.join(graphemes)
end
| 22 | 62 | 0.608647 |
ff444aa88e2c6ade5701d9f7b40dfb6c208e758a | 730 | ex | Elixir | test/support/feature_case.ex | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2020-05-31T21:54:40.000Z | 2020-05-31T21:54:40.000Z | test/support/feature_case.ex | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 64 | 2019-07-29T22:06:16.000Z | 2022-03-28T23:46:58.000Z | test/support/feature_case.ex | babe-project/BABE | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2019-07-28T19:17:43.000Z | 2019-07-28T19:17:43.000Z | defmodule Magpie.FeatureCase do
@moduledoc """
Defines the test case to be used by feature tests, using Wallaby.
"""
use ExUnit.CaseTemplate
# Injected into every module that does `use Magpie.FeatureCase`:
# brings in Wallaby's browser DSL plus the usual Ecto/router helpers.
using do
  quote do
    use Wallaby.DSL

    alias Magpie.Repo
    import Ecto
    import Ecto.Changeset
    import Ecto.Query

    import Magpie.Router.Helpers
    import Magpie.TestHelpers
  end
end
setup tags do
  # Check out a sandboxed DB connection for this test.
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(Magpie.Repo)

  # Synchronous tests share the connection with processes spawned by the
  # test (e.g. the browser-driven requests).
  # `!` instead of the soft-deprecated `unless`; tags[:async] may be nil.
  if !tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(Magpie.Repo, {:shared, self()})
  end

  # Hand the sandbox metadata to the Wallaby session so requests triggered
  # from the browser run inside the same DB sandbox.
  metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(Magpie.Repo, self())
  {:ok, session} = Wallaby.start_session(metadata: metadata)
  {:ok, session: session}
end
end
| 21.470588 | 73 | 0.672603 |
ff448f4ba6d17dbe279b26d7032a311cf83f9131 | 63 | ex | Elixir | lib/dornach_web/views/layout_view.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | null | null | null | lib/dornach_web/views/layout_view.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | 4 | 2020-01-28T10:12:25.000Z | 2021-05-10T23:05:12.000Z | lib/dornach_web/views/layout_view.ex | ream88/jw-dornach | 0a25deae13fafd832421bde21bc21035a128ac7d | [
"MIT"
] | null | null | null | defmodule DornachWeb.LayoutView do
# Pulls in the shared `:view` definitions from the DornachWeb entry module
# (presumably the standard Phoenix view helpers — confirm in dornach_web.ex).
use DornachWeb, :view
end
| 15.75 | 34 | 0.809524 |
ff44972ecf29b84a423efb9d25492d52edc97fde | 17,785 | ex | Elixir | lib/elixir/lib/calendar/date.ex | kennyballou/elixir | 0beeef8b1f49b6b347e22fe3ad7b654f18a963e9 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/calendar/date.ex | kennyballou/elixir | 0beeef8b1f49b6b347e22fe3ad7b654f18a963e9 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/calendar/date.ex | kennyballou/elixir | 0beeef8b1f49b6b347e22fe3ad7b654f18a963e9 | [
"Apache-2.0"
] | null | null | null | defmodule Date do
@moduledoc """
A Date struct and functions.
The Date struct contains the fields year, month, day and calendar.
New dates can be built with the `new/3` function or using the `~D`
sigil:
iex> ~D[2000-01-01]
~D[2000-01-01]
Both `new/3` and sigil return a struct where the date fields can
be accessed directly:
iex> date = ~D[2000-01-01]
iex> date.year
2000
iex> date.month
1
The functions on this module work with the `Date` struct as well
as any struct that contains the same fields as the `Date` struct,
such as `NaiveDateTime` and `DateTime`. Such functions expect
`t:Calendar.date/0` in their typespecs (instead of `t:t/0`).
Developers should avoid creating the Date structs directly
and instead rely on the functions provided by this module as well
as the ones in 3rd party calendar libraries.
## Comparing dates
Comparisons in Elixir using `==`, `>`, `<` and similar are structural
and based on the `Date` struct fields. For proper comparison between
dates, use the `compare/2` function.
## Using epochs
The `add/2` and `diff/2` functions can be used for computing dates
or retrieving the amount of days betweens instants. For example, if there
is an interest in computing the amount of days from the Unix epoch
(1970-01-01):
iex> Date.diff(~D[2010-04-17], ~D[1970-01-01])
14716
iex> Date.add(~D[1970-01-01], 14716)
~D[2010-04-17]
Those functions are optimized to deal with common epochs, such
as the Unix Epoch above or the Gregorian Epoch (0000-01-01).
"""
@enforce_keys [:year, :month, :day]
defstruct [:year, :month, :day, calendar: Calendar.ISO]
@type t :: %Date{year: Calendar.year, month: Calendar.month,
day: Calendar.day, calendar: Calendar.calendar}
@doc """
Returns a range of dates.
A range of dates represents a discrete number of dates where
the first and last values are dates with matching calendars.
Ranges of dates can be either increasing (`first <= last`) or
decreasing (`first > last`). They are also always inclusive.
## Examples
iex> Date.range(~D[1999-01-01], ~D[2000-01-01])
#DateRange<~D[1999-01-01], ~D[2000-01-01]>
iex> Date.range(~N[2000-01-01 09:00:00], ~D[1999-01-01])
#DateRange<~N[2000-01-01 09:00:00], ~D[1999-01-01]>
A range of dates implements the `Enumerable` protocol, which means
functions in the `Enum` module can be used to work with
ranges:
iex> range = Date.range(~D[2001-01-01], ~D[2002-01-01])
iex> Enum.count(range)
366
iex> Enum.member?(range, ~D[2001-02-01])
true
iex> Enum.reduce(range, 0, fn _date, acc -> acc - 1 end)
-366
"""
@spec range(Calendar.date, Calendar.date) :: Date.Range.t
def range(%{calendar: calendar} = first, %{calendar: calendar} = last) do
  # Both endpoints are precomputed in iso-days form (via to_iso_days/1,
  # defined later in this module) and stored on the Range struct.
  {first_days, _} = to_iso_days(first)
  {last_days, _} = to_iso_days(last)

  %Date.Range{
    first: first,
    last: last,
    first_in_iso_days: first_days,
    last_in_iso_days: last_days,
  }
end

# The clause above binds `calendar` twice, so it only matches when both
# calendars are equal; any other pair of date-shaped maps lands here.
def range(%{calendar: _, year: _, month: _, day: _},
          %{calendar: _, year: _, month: _, day: _}) do
  raise ArgumentError, "both dates must have matching calendars"
end
@doc """
Returns the current date in UTC.
## Examples
iex> date = Date.utc_today()
iex> date.year >= 2016
true
"""
@spec utc_today(Calendar.calendar) :: t
def utc_today(calendar \\ Calendar.ISO)

# Fast path for the ISO calendar: build the struct straight from os_time.
def utc_today(Calendar.ISO) do
  {:ok, {year, month, day}, _, _} = Calendar.ISO.from_unix(System.os_time, :native)
  %Date{year: year, month: month, day: day}
end

# Other calendars: go through DateTime, then drop the time part.
def utc_today(calendar) do
  calendar
  |> DateTime.utc_now
  |> DateTime.to_date
end
@doc """
Returns true if the year in the given `date` is a leap year.
## Examples
iex> Date.leap_year?(~D[2000-01-01])
true
iex> Date.leap_year?(~D[2001-01-01])
false
iex> Date.leap_year?(~D[2004-01-01])
true
iex> Date.leap_year?(~D[1900-01-01])
false
iex> Date.leap_year?(~N[2004-01-01 01:23:45])
true
"""
@spec leap_year?(Calendar.date) :: boolean()
def leap_year?(date)

# Leap-year logic belongs to the date's calendar, so simply delegate.
def leap_year?(%{calendar: calendar, year: year}), do: calendar.leap_year?(year)
@doc """
Returns the number of days in the given `date` month.
## Examples
iex> Date.days_in_month(~D[1900-01-13])
31
iex> Date.days_in_month(~D[1900-02-09])
28
iex> Date.days_in_month(~N[2000-02-20 01:23:45])
29
"""
@spec days_in_month(Calendar.date) :: Calendar.day
def days_in_month(date)

# Month lengths are calendar-specific, so delegate to the date's calendar.
def days_in_month(%{calendar: calendar, year: year, month: month}),
  do: calendar.days_in_month(year, month)
@doc """
Builds a new ISO date.
Expects all values to be integers. Returns `{:ok, date}` if each
entry fits its appropriate range, returns `{:error, reason}` otherwise.
## Examples
iex> Date.new(2000, 1, 1)
{:ok, ~D[2000-01-01]}
iex> Date.new(2000, 13, 1)
{:error, :invalid_date}
iex> Date.new(2000, 2, 29)
{:ok, ~D[2000-02-29]}
iex> Date.new(2000, 2, 30)
{:error, :invalid_date}
iex> Date.new(2001, 2, 29)
{:error, :invalid_date}
"""
@spec new(Calendar.year, Calendar.month, Calendar.day) :: {:ok, t} | {:error, atom}
def new(year, month, day, calendar \\ Calendar.ISO) do
  # Validation is delegated to the calendar; only valid triples become structs.
  if calendar.valid_date?(year, month, day),
    do: {:ok, %Date{calendar: calendar, year: year, month: month, day: day}},
    else: {:error, :invalid_date}
end
@doc """
Converts the given date to a string according to its calendar.

### Examples

    iex> Date.to_string(~D[2000-02-28])
    "2000-02-28"
    iex> Date.to_string(~N[2000-02-28 01:23:45])
    "2000-02-28"

"""
@spec to_string(Calendar.date) :: String.t
def to_string(date)

def to_string(%{calendar: cal, year: y, month: m, day: d}) do
  # Textual rendering is calendar-specific.
  cal.date_to_string(y, m, d)
end
@doc """
Parses the extended "Dates" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

## Examples

    iex> Date.from_iso8601("2015-01-23")
    {:ok, ~D[2015-01-23]}
    iex> Date.from_iso8601("2015:01:23")
    {:error, :invalid_format}
    iex> Date.from_iso8601("2015-01-32")
    {:error, :invalid_date}

"""
# Fix: the previous @spec omitted the optional `calendar` argument, leaving
# the generated `from_iso8601/2` without a matching spec. The nested `with`
# is also flattened into a single chain.
@spec from_iso8601(String.t, Calendar.calendar) :: {:ok, t} | {:error, atom}
def from_iso8601(string, calendar \\ Calendar.ISO)

def from_iso8601(<<year::4-bytes, ?-, month::2-bytes, ?-, day::2-bytes>>, calendar) do
  with {year, ""} <- Integer.parse(year),
       {month, ""} <- Integer.parse(month),
       {day, ""} <- Integer.parse(day),
       {:ok, date} <- new(year, month, day, Calendar.ISO) do
    convert(date, calendar)
  else
    # `new/4` already produced a self-describing error (`:invalid_date`).
    {:error, _} = error -> error
    # Any failed `Integer.parse/1` match means the shape was not `YYYY-MM-DD`.
    _ -> {:error, :invalid_format}
  end
end

def from_iso8601(<<_::binary>>, _calendar) do
  {:error, :invalid_format}
end
@doc """
Parses the extended "Dates" format described by
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

Raises if the format is invalid.

## Examples

    iex> Date.from_iso8601!("2015-01-23")
    ~D[2015-01-23]
    iex> Date.from_iso8601!("2015:01:23")
    ** (ArgumentError) cannot parse "2015:01:23" as date, reason: :invalid_format

"""
# Fix: the previous @spec omitted the optional `calendar` argument, leaving
# the generated `from_iso8601!/2` without a matching spec.
@spec from_iso8601!(String.t, Calendar.calendar) :: t
def from_iso8601!(string, calendar \\ Calendar.ISO) do
  case from_iso8601(string, calendar) do
    {:ok, value} ->
      value

    {:error, reason} ->
      raise ArgumentError, "cannot parse #{inspect string} as date, reason: #{inspect reason}"
  end
end
@doc """
Converts the given `date` to
[ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

By default the human-readable "extended" format (`YYYY-MM-DD`) is produced;
pass `:basic` for the compact `YYYYMMDD` form.

Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will raise an `ArgumentError`.

### Examples

    iex> Date.to_iso8601(~D[2000-02-28])
    "2000-02-28"
    iex> Date.to_iso8601(~D[2000-02-28], :basic)
    "20000228"
    iex> Date.to_iso8601(~N[2000-02-28 00:00:00])
    "2000-02-28"

"""
@spec to_iso8601(Calendar.date, :extended | :basic) :: String.t
def to_iso8601(date, format \\ :extended) when format in [:basic, :extended] do
  # Normalize to the ISO calendar (raises for incompatible calendars).
  converted = convert!(date, Calendar.ISO)
  Calendar.ISO.date_to_iso8601(converted.year, converted.month, converted.day, format)
end
@doc """
Converts the given `date` to an Erlang date tuple.

Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will raise.

## Examples

    iex> Date.to_erl(~D[2000-01-01])
    {2000, 1, 1}
    iex> Date.to_erl(~N[2000-01-01 00:00:00])
    {2000, 1, 1}

"""
@spec to_erl(Calendar.date) :: :calendar.date
def to_erl(date) do
  # Normalize to the ISO calendar first (raises for incompatible calendars).
  converted = convert!(date, Calendar.ISO)
  {converted.year, converted.month, converted.day}
end
@doc """
Converts an Erlang date tuple to a `Date` struct.

Only supports converting dates which are in the ISO calendar,
or other calendars in which the days also start at midnight.
Attempting to convert dates from other calendars will return an error tuple.

## Examples

    iex> Date.from_erl({2000, 1, 1})
    {:ok, ~D[2000-01-01]}
    iex> Date.from_erl({2000, 13, 1})
    {:error, :invalid_date}

"""
# Fix: the previous @spec omitted the optional `calendar` argument, leaving
# the generated `from_erl/2` without a matching spec.
@spec from_erl(:calendar.date, Calendar.calendar) :: {:ok, t} | {:error, atom}
def from_erl(tuple, calendar \\ Calendar.ISO)

def from_erl({year, month, day}, calendar) do
  # Validate in the ISO calendar first, then convert to the requested one.
  with {:ok, date} <- new(year, month, day, Calendar.ISO),
       do: convert(date, calendar)
end
@doc """
Converts an Erlang date tuple but raises for invalid dates.

## Examples

    iex> Date.from_erl!({2000, 1, 1})
    ~D[2000-01-01]
    iex> Date.from_erl!({2000, 13, 1})
    ** (ArgumentError) cannot convert {2000, 13, 1} to date, reason: :invalid_date

"""
@spec from_erl!(:calendar.date) :: t
def from_erl!(tuple) do
  case from_erl(tuple) do
    {:ok, date} ->
      date

    {:error, reason} ->
      raise ArgumentError, "cannot convert #{inspect tuple} to date, reason: #{inspect reason}"
  end
end
@doc """
Compares two date structs.

Returns `:gt` if first date is later than the second
and `:lt` for vice versa. If the two dates are equal
`:eq` is returned.

## Examples

    iex> Date.compare(~D[2016-04-16], ~D[2016-04-28])
    :lt

This function can also be used to compare across more
complex calendar types by considering only the date fields:

    iex> Date.compare(~D[2016-04-16], ~N[2016-04-28 01:23:45])
    :lt
    iex> Date.compare(~D[2016-04-16], ~N[2016-04-16 01:23:45])
    :eq
    iex> Date.compare(~N[2016-04-16 12:34:56], ~N[2016-04-16 01:23:45])
    :eq

"""
@spec compare(Calendar.date, Calendar.date) :: :lt | :eq | :gt
def compare(%{calendar: calendar, year: year1, month: month1, day: day1},
            %{calendar: calendar, year: year2, month: month2, day: day2}) do
  # Same calendar on both sides: field-wise tuple ordering is sufficient.
  ordering({year1, month1, day1}, {year2, month2, day2})
end

def compare(date1, date2) do
  if Calendar.compatible_calendars?(date1.calendar, date2.calendar) do
    # Different but compatible calendars: compare via the shared iso-days
    # representation.
    ordering(to_iso_days(date1), to_iso_days(date2))
  else
    raise ArgumentError, """
    cannot compare #{inspect date1} with #{inspect date2}.
    This comparison would be ambiguous as their calendars have incompatible day rollover moments.
    Specify an exact time of day (using `DateTime`s) to resolve this ambiguity
    """
  end
end

# Maps Erlang term ordering of two comparable tuples onto :lt | :eq | :gt.
defp ordering(first, second) do
  cond do
    first > second -> :gt
    first < second -> :lt
    true -> :eq
  end
end
@doc """
Converts the given `date` from its calendar to the given `calendar`.

Returns `{:ok, date}` if the calendars are compatible,
or `{:error, :incompatible_calendars}` if they are not.

See also `Calendar.compatible_calendars?/2`.

## Examples

Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:

    iex> Date.convert(~D[2000-01-01], Calendar.Holocene)
    {:ok, %Date{calendar: Calendar.Holocene, year: 12000, month: 1, day: 1}}

"""
@spec convert(Calendar.date, Calendar.calendar) :: {:ok, t} | {:error, :incompatible_calendars}
def convert(%{calendar: calendar, year: year, month: month, day: day}, calendar) do
  # Already in the target calendar: just rebuild the struct.
  {:ok, %Date{calendar: calendar, year: year, month: month, day: day}}
end

def convert(%{calendar: calendar} = date, target_calendar) do
  if Calendar.compatible_calendars?(calendar, target_calendar) do
    # Round-trip through the shared iso-days representation.
    {:ok, date |> to_iso_days() |> from_iso_days(target_calendar)}
  else
    {:error, :incompatible_calendars}
  end
end
@doc """
Similar to `Date.convert/2`, but raises an `ArgumentError`
if the conversion between the two calendars is not possible.

## Examples

Imagine someone implements `Calendar.Holocene`, a calendar based on the
Gregorian calendar that adds exactly 10,000 years to the current Gregorian
year:

    iex> Date.convert!(~D[2000-01-01], Calendar.Holocene)
    %Date{calendar: Calendar.Holocene, year: 12000, month: 1, day: 1}

"""
@spec convert!(Calendar.date, Calendar.calendar) :: t
def convert!(date, calendar) do
  case convert(date, calendar) do
    {:ok, converted} ->
      converted

    {:error, reason} ->
      raise ArgumentError,
            "cannot convert #{inspect date} to target calendar #{inspect calendar}, reason: #{inspect reason}"
  end
end
@doc """
Adds the number of days to the given `date`.

The days are counted as gregorian days. The date is returned in the same
calendar as it was given in.

## Examples

    iex> Date.add(~D[2000-01-03], -2)
    ~D[2000-01-01]
    iex> Date.add(~D[2000-01-01], 2)
    ~D[2000-01-03]
    iex> Date.add(~N[2000-01-01 09:00:00], 2)
    ~D[2000-01-03]

"""
@spec add(Calendar.date, integer()) :: t
def add(%{calendar: calendar} = date, days) do
  # Shift the whole-day component; the (midnight) day fraction is unchanged.
  {base_days, day_fraction} = to_iso_days(date)
  from_iso_days({base_days + days, day_fraction}, calendar)
end
@doc """
Calculates the difference between two dates, in a full number of days.

It returns the number of gregorian days between the dates. Only `Date`
structs that follow the same or compatible calendars can be compared
this way. If two calendars are not compatible, it will raise.

## Examples

    iex> Date.diff(~D[2000-01-03], ~D[2000-01-01])
    2
    iex> Date.diff(~D[2000-01-01], ~D[2000-01-03])
    -2
    iex> Date.diff(~D[2000-01-01], ~N[2000-01-03 09:00:00])
    -2

"""
@spec diff(Calendar.date, Calendar.date) :: integer
def diff(%{calendar: Calendar.ISO, year: y1, month: m1, day: d1},
         %{calendar: Calendar.ISO, year: y2, month: m2, day: d2}) do
  # ISO fast path: subtract the whole-day counts directly.
  Calendar.ISO.date_to_iso_days_days(y1, m1, d1) - Calendar.ISO.date_to_iso_days_days(y2, m2, d2)
end

def diff(%{calendar: calendar1} = date1, %{calendar: calendar2} = date2) do
  # Guard clause: incompatible calendars make the day difference ambiguous.
  if not Calendar.compatible_calendars?(calendar1, calendar2) do
    raise ArgumentError, "cannot calculate the difference between #{inspect date1} and #{inspect date2} because their calendars are not compatible and thus the result would be ambiguous"
  end

  {days1, _fraction} = to_iso_days(date1)
  {days2, _fraction} = to_iso_days(date2)
  days1 - days2
end
# Converts a date into the internal iso-days representation:
# `{days, day_fraction}` where `day_fraction` is a `{parts, parts_per_day}`
# rational.
defp to_iso_days(%{calendar: Calendar.ISO, year: year, month: month, day: day}) do
  # ISO fast path; midnight is expressed as 0/86_400_000_000 of a day.
  {Calendar.ISO.date_to_iso_days_days(year, month, day), {0, 86400000000}}
end

defp to_iso_days(%{calendar: calendar, year: year, month: month, day: day}) do
  # Other calendars convert via their naive-datetime path, fixed at midnight.
  calendar.naive_datetime_to_iso_days(year, month, day, 0, 0, 0, {0, 0})
end
# Rebuilds a `%Date{}` from the `{days, day_fraction}` iso-days
# representation; the fraction is irrelevant for a pure date.
defp from_iso_days({days, _}, Calendar.ISO) do
  {year, month, day} = Calendar.ISO.date_from_iso_days_days(days)
  %Date{year: year, month: month, day: day, calendar: Calendar.ISO}
end

defp from_iso_days(iso_days, target_calendar) do
  # Non-ISO calendars go through their naive-datetime conversion; the time
  # components of the result are discarded.
  {year, month, day, _, _, _, _} = target_calendar.naive_datetime_from_iso_days(iso_days)
  %Date{year: year, month: month, day: day, calendar: target_calendar}
end
@doc """
Calculates the day of the week of a given `date`.

Returns the day of the week as an integer. For the ISO 8601
calendar (the default), it is an integer from 1 to 7, where
1 is Monday and 7 is Sunday.

## Examples

    iex> Date.day_of_week(~D[2016-10-31])
    1
    iex> Date.day_of_week(~D[2016-11-01])
    2
    iex> Date.day_of_week(~N[2016-11-01 01:23:45])
    2

"""
@spec day_of_week(Calendar.date) :: non_neg_integer()
def day_of_week(date)

def day_of_week(%{calendar: cal, year: year, month: month, day: day}) do
  # Week-day numbering is defined by the calendar itself.
  cal.day_of_week(year, month, day)
end
## Helpers
# Enables `to_string/1` and string interpolation for the enclosing date
# struct by delegating rendering to the date's own calendar module.
defimpl String.Chars do
  def to_string(%{calendar: calendar, year: year, month: month, day: day}) do
    calendar.date_to_string(year, month, day)
  end
end
defimpl Inspect do
  # ISO dates render via the `~D` sigil so `inspect/1` output can be pasted
  # back into code.
  def inspect(%{calendar: Calendar.ISO, year: year, month: month, day: day}, _) do
    "~D[" <> Calendar.ISO.date_to_string(year, month, day) <> "]"
  end

  # Non-ISO calendars have no sigil form; fall back to generic struct
  # inspection.
  def inspect(date, opts) do
    Inspect.Any.inspect(date, opts)
  end
end
end
| 29.740803 | 188 | 0.644138 |
ff44a46bba74944c71f3b230f7830b5fb0bc0406 | 762 | exs | Elixir | test/irc/connection_test.exs | aeturnum/blur | e59bb4a7451cea60d92166e495a3029645a1ffaf | [
"MIT"
] | 19 | 2015-07-21T04:58:12.000Z | 2022-01-20T23:25:18.000Z | test/irc/connection_test.exs | aeturnum/blur | e59bb4a7451cea60d92166e495a3029645a1ffaf | [
"MIT"
] | 3 | 2020-07-17T22:29:17.000Z | 2020-07-20T00:31:41.000Z | test/irc/connection_test.exs | aeturnum/blur | e59bb4a7451cea60d92166e495a3029645a1ffaf | [
"MIT"
] | 3 | 2015-08-26T14:59:37.000Z | 2021-05-05T04:00:06.000Z | defmodule Blur.IRC.ConnectionTest do
use ExUnit.Case, async: true
doctest Blur.IRC.Connection
alias Blur.IRC.Connection.State
alias Blur.IRC.Connection
# test "on connecting to the IRC server" do
# {:noreply, state} =
# Connection.handle_info(
# {:connected, "localhost", 6667},
# %State{}
# )
# assert state === %State{}
# end
describe "on disconnecting from the channel" do
  test "regular IRC" do
    # A plain disconnect notification must leave the state unchanged.
    {:noreply, new_state} = Connection.handle_info({:disconnected}, %State{})
    assert new_state === %State{}
  end

  test "Twitch Tags" do
    # Twitch attaches a tag payload to the disconnect message; the state
    # must still come back untouched.
    {:noreply, new_state} =
      Connection.handle_info({:disconnected, "@display-name=rockerBOO"}, %State{})

    assert new_state === %State{}
  end
end
end
| 23.090909 | 84 | 0.620735 |
ff44bdfdb6001bf0e0086037449c9645ff4212e3 | 4,641 | exs | Elixir | test/credo/check/readability/large_numbers_test.exs | ayrat555/credo | 517699f82fc6ebe266152d997b64878b334e7bd8 | [
"MIT"
] | null | null | null | test/credo/check/readability/large_numbers_test.exs | ayrat555/credo | 517699f82fc6ebe266152d997b64878b334e7bd8 | [
"MIT"
] | null | null | null | test/credo/check/readability/large_numbers_test.exs | ayrat555/credo | 517699f82fc6ebe266152d997b64878b334e7bd8 | [
"MIT"
] | null | null | null | defmodule Credo.Check.Readability.LargeNumbersTest do
use Credo.TestHelper
@described_check Credo.Check.Readability.LargeNumbers
#
# cases NOT raising issues
#

# Correctly underscored literals and values below the threshold must not
# be flagged.
test "it should NOT report expected code" do
  """
  @budgets %{
  "budget1": 100_000,
  "budget2": 200_000,
  "budget3": 300_000,
  "budget4": 500_000,
  "budget5": 1_000_000,
  "budget6": 2_000_000
  }
  @int32_min -2_147_483_648
  @int32_max 2_147_483_647
  @int64_min -9_223_372_036_854_775_808
  @int64_max 9_223_372_036_854_775_807
  def numbers do
  1024 + 1_000_000 + 11_000 + 22_000 + 33_000
  10_000..20_000
  end
  """
  |> to_source_file
  |> refute_issues(@described_check)
end

# Several well-formatted floats on one line are all acceptable.
test "it should allow multiple large floats on a line" do
  """
  def numbers do
  100_000.1 + 5_000_000.2 + 66_000.3
  end
  """
  |> to_source_file
  |> refute_issues(@described_check)
end

# Regression guard: anonymous-function call syntax must not confuse the
# number scanner.
test "it should not complain about numbers in anon function calls" do
  """
  defmodule Demo.LargeNumberAnonWarning do
  @moduledoc false
  def harmless_function do
  say_num = fn num ->
  IO.inspect num
  end
  say_num.( say_num.(10_000), say_num.(20_000) )
  end
  end
  """
  |> to_source_file
  |> refute_issues(@described_check)
end

# Hex, binary, and octal literals are exempt from the underscore rule.
test "it should not complain about non-decimal numbers" do
  """
  def numbers do
  0xFFFF
  0x123456
  0b1111_1111_1111_1111
  0o777_777
  end
  """
  |> to_source_file
  |> refute_issues(@described_check)
end

# Regression guards for previously reported false positives.
test "check old false positive is fixed /1" do
  " defmacro oid_ansi_x9_62, do: quote do: {1,2,840,10_045}"
  |> to_source_file
  |> refute_issues(@described_check)
end

test "check old false positive is fixed /2" do
  """
  %{
  bounds: [
  0, 1, 2, 5, 10, 20, 30, 65, 85,
  100, 200, 400, 800,
  1_000, 2_000, 4_000, 8_000, 16_000]
  }
  """
  |> to_source_file
  |> refute_issues(@described_check)
end
#
# cases raising issues
#

# Large literals without underscores (or with misplaced underscores) must
# each be flagged.
test "it should report a violation" do
  """
  def numbers do
  1024 + 1000000 + 43534
  end
  """
  |> to_source_file
  |> assert_issues(@described_check)
end

test "it should report a violation, since it is formatted incorrectly" do
  """
  def numbers do
  1024 + 10_00_00_0 + 43534
  end
  """
  |> to_source_file
  |> assert_issues(@described_check)
end

# The :only_greater_than option raises the reporting threshold so only one
# of the two large literals is flagged.
test "it should report only one violation" do
  """
  def numbers do
  1024 + 1000000 + 43534
  end
  """
  |> to_source_file
  |> assert_issue(@described_check, only_greater_than: 50000)
end

# Ranges: each endpoint is checked independently.
test "it should report only one violation for ranges /1" do
  """
  def numbers do
  10000..20_000
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end

test "it should report only one violation for ranges /2" do
  """
  def numbers do
  10_000..20000
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end

test "it should report only two violation for ranges" do
  """
  def numbers do
  10000..20000
  end
  """
  |> to_source_file
  |> assert_issues(@described_check)
end

# The check applies inside defp and defmacro bodies and to floats too.
test "it should report a violation /2" do
  """
  defp numbers do
  1024 + 43534
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end

test "it should report a violation /3" do
  """
  defp numbers do
  1024 + 43534.0
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end

test "it should report a violation /4" do
  """
  defmacro numbers do
  1024 + 1_000000
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end

# The suggested replacement in the issue message must keep the fractional
# part verbatim (including trailing zeros) while underscoring the integer
# part.
test "it should format floating point numbers nicely" do
  """
  def numbers do
  10000.00001
  end
  """
  |> to_source_file
  |> assert_issue(@described_check, fn %Credo.Issue{message: message} ->
  assert Regex.run(~r/[\d\._]+/, message) |> hd == "10_000.00001"
  end)
end

test "it should report all digits from the source" do
  """
  def numbers do
  10000.000010
  end
  """
  |> to_source_file
  |> assert_issue(@described_check, fn %Credo.Issue{message: message} ->
  assert Regex.run(~r/[\d\._]+/, message) |> hd == "10_000.000010"
  end)
end

test "it should detect report issues with multiple large floats on a line" do
  """
  def numbers do
  100_000.1 + 5_000_000.2 + 66000.3
  end
  """
  |> to_source_file
  |> assert_issue(@described_check)
end
end
| 20.811659 | 79 | 0.607412 |
ff44bfa35c1512d49c258f585f36fc2bb81dc1d5 | 21,942 | ex | Elixir | lib/aws/generated/firehose.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 341 | 2018-04-04T19:06:19.000Z | 2022-03-25T21:34:23.000Z | lib/aws/generated/firehose.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 82 | 2018-04-04T17:32:33.000Z | 2022-03-24T15:12:04.000Z | lib/aws/generated/firehose.ex | kw7oe/aws-elixir | 4ba60502dde270c83143822c9964018c7770bad7 | [
"Apache-2.0"
] | 76 | 2018-04-10T20:19:44.000Z | 2022-03-15T13:49:19.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Firehose do
@moduledoc """
Amazon Kinesis Data Firehose API Reference
Amazon Kinesis Data Firehose is a fully managed service that delivers real-time
streaming data to destinations such as Amazon Simple Storage Service (Amazon
S3), Amazon Elasticsearch Service (Amazon ES), Amazon Redshift, and Splunk.
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor for Kinesis Data Firehose, consumed by the
# shared `AWS.Request` machinery (endpoint resolution, signing, and the
# JSON-1.1 content type / target prefix).
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "Firehose",
    api_version: "2015-08-04",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "firehose",
    global?: false,
    protocol: "json",
    service_id: "Firehose",
    signature_version: "v4",
    signing_name: "firehose",
    target_prefix: "Firehose_20150804"
  }
end
@doc """
Creates a Kinesis Data Firehose delivery stream.
By default, you can create up to 50 delivery streams per AWS Region.
This is an asynchronous operation that immediately returns. The initial status
of the delivery stream is `CREATING`. After the delivery stream is created, its
status is `ACTIVE` and it now accepts data. If the delivery stream creation
fails, the status transitions to `CREATING_FAILED`. Attempts to send data to a
delivery stream that is not in the `ACTIVE` state cause an exception. To check
the state of a delivery stream, use `DescribeDeliveryStream`.
If the status of a delivery stream is `CREATING_FAILED`, this status doesn't
change, and you can't invoke `CreateDeliveryStream` again on it. However, you
can invoke the `DeleteDeliveryStream` operation to delete it.
A Kinesis Data Firehose delivery stream can be configured to receive records
directly from providers using `PutRecord` or `PutRecordBatch`, or it can be
configured to use an existing Kinesis stream as its source. To specify a Kinesis
data stream as input, set the `DeliveryStreamType` parameter to
`KinesisStreamAsSource`, and provide the Kinesis stream Amazon Resource Name
(ARN) and role ARN in the `KinesisStreamSourceConfiguration` parameter.
To create a delivery stream with server-side encryption (SSE) enabled, include
`DeliveryStreamEncryptionConfigurationInput` in your request. This is optional.
You can also invoke `StartDeliveryStreamEncryption` to turn on SSE for an
existing delivery stream that doesn't have SSE enabled.
A delivery stream is configured with a single destination: Amazon S3, Amazon ES,
Amazon Redshift, or Splunk. You must specify only one of the following
destination configuration parameters: `ExtendedS3DestinationConfiguration`,
`S3DestinationConfiguration`, `ElasticsearchDestinationConfiguration`,
`RedshiftDestinationConfiguration`, or `SplunkDestinationConfiguration`.
When you specify `S3DestinationConfiguration`, you can also provide the
following optional values: BufferingHints, `EncryptionConfiguration`, and
`CompressionFormat`. By default, if no `BufferingHints` value is provided,
Kinesis Data Firehose buffers data up to 5 MB or for 5 minutes, whichever
condition is satisfied first. `BufferingHints` is a hint, so there are some
cases where the service cannot adhere to these conditions strictly. For example,
record boundaries might be such that the size is a little over or under the
configured buffering size. By default, no encryption is performed. We strongly
recommend that you enable encryption to ensure secure data storage in Amazon S3.
A few notes about Amazon Redshift as a destination:
* An Amazon Redshift destination requires an S3 bucket as
intermediate location. Kinesis Data Firehose first delivers data to Amazon S3
and then uses `COPY` syntax to load data into an Amazon Redshift table. This is
specified in the `RedshiftDestinationConfiguration.S3Configuration` parameter.
* The compression formats `SNAPPY` or `ZIP` cannot be specified in
`RedshiftDestinationConfiguration.S3Configuration` because the Amazon Redshift
`COPY` operation that reads from the S3 bucket doesn't support these compression
formats.
* We strongly recommend that you use the user name and password you
provide exclusively with Kinesis Data Firehose, and that the permissions for the
account are restricted for Amazon Redshift `INSERT` permissions.
Kinesis Data Firehose assumes the IAM role that is configured as part of the
destination. The role should allow the Kinesis Data Firehose principal to assume
the role, and the role should have permissions that allow the service to deliver
the data. For more information, see [Grant Kinesis Data Firehose Access to an Amazon S3
Destination](https://docs.aws.amazon.com/firehose/latest/dev/controlling-access.html#using-iam-s3)
in the *Amazon Kinesis Data Firehose Developer Guide*.
"""
def create_delivery_stream(%Client{} = client, input, options \\ []) do
  # Dispatch through the shared JSON-1.1 request pipeline for this service.
  meta = metadata()
  Request.request_post(client, meta, "CreateDeliveryStream", input, options)
end
@doc """
Deletes a delivery stream and its data.
To check the state of a delivery stream, use `DescribeDeliveryStream`. You can
delete a delivery stream only if it is in one of the following states: `ACTIVE`,
`DELETING`, `CREATING_FAILED`, or `DELETING_FAILED`. You can't delete a delivery
stream that is in the `CREATING` state. While the deletion request is in
process, the delivery stream is in the `DELETING` state.
While the delivery stream is in the `DELETING` state, the service might continue
to accept records, but it doesn't make any guarantees with respect to delivering
the data. Therefore, as a best practice, first stop any applications that are
sending records before you delete a delivery stream.
"""
def delete_delivery_stream(%Client{} = client, input, options \\ []) do
  # All Firehose operations funnel through the shared request helper.
  meta = metadata()
  Request.request_post(client, meta, "DeleteDeliveryStream", input, options)
end
@doc """
Describes the specified delivery stream and its status.
For example, after your delivery stream is created, call
`DescribeDeliveryStream` to see whether the delivery stream is `ACTIVE` and
therefore ready for data to be sent to it.
If the status of a delivery stream is `CREATING_FAILED`, this status doesn't
change, and you can't invoke `CreateDeliveryStream` again on it. However, you
can invoke the `DeleteDeliveryStream` operation to delete it. If the status is
`DELETING_FAILED`, you can force deletion by invoking `DeleteDeliveryStream`
again but with `DeleteDeliveryStreamInput$AllowForceDelete` set to true.
"""
def describe_delivery_stream(%Client{} = client, input, options \\ []) do
  # Thin wrapper: the operation name selects the Firehose API action.
  meta = metadata()
  Request.request_post(client, meta, "DescribeDeliveryStream", input, options)
end
@doc """
Lists your delivery streams in alphabetical order of their names.
The number of delivery streams might be too large to return using a single call
to `ListDeliveryStreams`. You can limit the number of delivery streams returned,
using the `Limit` parameter. To determine whether there are more delivery
streams to list, check the value of `HasMoreDeliveryStreams` in the output. If
there are more delivery streams to list, you can request them by calling this
operation again and setting the `ExclusiveStartDeliveryStreamName` parameter to
the name of the last delivery stream returned in the last call.
"""
def list_delivery_streams(%Client{} = client, input, options \\ []) do
  # Thin wrapper over the shared JSON-protocol POST helper.
  meta = metadata()
  Request.request_post(client, meta, "ListDeliveryStreams", input, options)
end
@doc """
Lists the tags for the specified delivery stream.
This operation has a limit of five transactions per second per account.
"""
def list_tags_for_delivery_stream(%Client{} = client, input, options \\ []) do
  # Thin wrapper over the shared JSON-protocol POST helper.
  meta = metadata()
  Request.request_post(client, meta, "ListTagsForDeliveryStream", input, options)
end
@doc """
Writes a single data record into an Amazon Kinesis Data Firehose delivery
stream.
To write multiple data records into a delivery stream, use `PutRecordBatch`.
Applications using these operations are referred to as producers.
By default, each delivery stream can take in up to 2,000 transactions per
second, 5,000 records per second, or 5 MB per second. If you use `PutRecord` and
`PutRecordBatch`, the limits are an aggregate across these two operations for
each delivery stream. For more information about limits and how to request an
increase, see [Amazon Kinesis Data Firehose Limits](https://docs.aws.amazon.com/firehose/latest/dev/limits.html).
You must specify the name of the delivery stream and the data record when using
`PutRecord`. The data record consists of a data blob that can be up to 1,000 KB
in size, and any kind of data. For example, it can be a segment from a log file,
geographic location data, website clickstream data, and so on.
Kinesis Data Firehose buffers records before delivering them to the destination.
To disambiguate the data blobs at the destination, a common solution is to use
delimiters in the data, such as a newline (`\n`) or some other character unique
within the data. This allows the consumer application to parse individual data
items when reading the data from the destination.
The `PutRecord` operation returns a `RecordId`, which is a unique string
assigned to each record. Producer applications can use this ID for purposes such
as auditability and investigation.
If the `PutRecord` operation throws a `ServiceUnavailableException`, back off
and retry. If the exception persists, it is possible that the throughput limits
have been exceeded for the delivery stream.
Data records sent to Kinesis Data Firehose are stored for 24 hours from the time
they are added to a delivery stream as it tries to send the records to the
destination. If the destination is unreachable for more than 24 hours, the data
is no longer available.
Don't concatenate two or more base64 strings to form the data fields of your
records. Instead, concatenate the raw data, then perform base64 encoding.
"""
def put_record(%Client{} = client, input, options \\ []) do
  # Single-record ingestion; delegates to the shared request pipeline.
  meta = metadata()
  Request.request_post(client, meta, "PutRecord", input, options)
end
@doc """
Writes multiple data records into a delivery stream in a single call, which can
achieve higher throughput per producer than when writing single records.
To write single data records into a delivery stream, use `PutRecord`.
Applications using these operations are referred to as producers.
For information about service quota, see [Amazon Kinesis Data Firehose Quota](https://docs.aws.amazon.com/firehose/latest/dev/limits.html).
Each `PutRecordBatch` request supports up to 500 records. Each record in the
request can be as large as 1,000 KB (before 64-bit encoding), up to a limit of 4
MB for the entire request. These limits cannot be changed.
You must specify the name of the delivery stream and the data record when using
`PutRecord`. The data record consists of a data blob that can be up to 1,000 KB
in size, and any kind of data. For example, it could be a segment from a log
file, geographic location data, website clickstream data, and so on.
Kinesis Data Firehose buffers records before delivering them to the destination.
To disambiguate the data blobs at the destination, a common solution is to use
delimiters in the data, such as a newline (`\n`) or some other character unique
within the data. This allows the consumer application to parse individual data
items when reading the data from the destination.
The `PutRecordBatch` response includes a count of failed records,
`FailedPutCount`, and an array of responses, `RequestResponses`. Even if the
`PutRecordBatch` call succeeds, the value of `FailedPutCount` may be greater
than 0, indicating that there are records for which the operation didn't
succeed. Each entry in the `RequestResponses` array provides additional
information about the processed record. It directly correlates with a record in
the request array using the same ordering, from the top to the bottom. The
response array always includes the same number of records as the request array.
`RequestResponses` includes both successfully and unsuccessfully processed
records. Kinesis Data Firehose tries to process all records in each
`PutRecordBatch` request. A single record failure does not stop the processing
of subsequent records.
A successfully processed record includes a `RecordId` value, which is unique for
the record. An unsuccessfully processed record includes `ErrorCode` and
`ErrorMessage` values. `ErrorCode` reflects the type of error, and is one of the
following values: `ServiceUnavailableException` or `InternalFailure`.
`ErrorMessage` provides more detailed information about the error.
If there is an internal server error or a timeout, the write might have
completed or it might have failed. If `FailedPutCount` is greater than 0, retry
the request, resending only those records that might have failed processing.
This minimizes the possible duplicate records and also reduces the total bytes
sent (and corresponding charges). We recommend that you handle any duplicates at
the destination.
If `PutRecordBatch` throws `ServiceUnavailableException`, back off and retry. If
the exception persists, it is possible that the throughput limits have been
exceeded for the delivery stream.
Data records sent to Kinesis Data Firehose are stored for 24 hours from the time
they are added to a delivery stream as it attempts to send the records to the
destination. If the destination is unreachable for more than 24 hours, the data
is no longer available.
Don't concatenate two or more base64 strings to form the data fields of your
records. Instead, concatenate the raw data, then perform base64 encoding.
"""
def put_record_batch(%Client{} = client, input, options \\ []) do
  # Batched ingestion; delegates to the shared request pipeline.
  meta = metadata()
  Request.request_post(client, meta, "PutRecordBatch", input, options)
end
@doc """
Enables server-side encryption (SSE) for the delivery stream.
This operation is asynchronous. It returns immediately. When you invoke it,
Kinesis Data Firehose first sets the encryption status of the stream to
`ENABLING`, and then to `ENABLED`. The encryption status of a delivery stream is
the `Status` property in `DeliveryStreamEncryptionConfiguration`. If the
operation fails, the encryption status changes to `ENABLING_FAILED`. You can
continue to read and write data to your delivery stream while the encryption
status is `ENABLING`, but the data is not encrypted. It can take up to 5 seconds
after the encryption status changes to `ENABLED` before all records written to
the delivery stream are encrypted. To find out whether a record or a batch of
records was encrypted, check the response elements `PutRecordOutput$Encrypted`
and `PutRecordBatchOutput$Encrypted`, respectively.
To check the encryption status of a delivery stream, use
`DescribeDeliveryStream`.
Even if encryption is currently enabled for a delivery stream, you can still
invoke this operation on it to change the ARN of the CMK or both its type and
ARN. If you invoke this method to change the CMK, and the old CMK is of type
`CUSTOMER_MANAGED_CMK`, Kinesis Data Firehose schedules the grant it had on the
old CMK for retirement. If the new CMK is of type `CUSTOMER_MANAGED_CMK`,
Kinesis Data Firehose creates a grant that enables it to use the new CMK to
encrypt and decrypt data and to manage the grant.
If a delivery stream already has encryption enabled and then you invoke this
operation to change the ARN of the CMK or both its type and ARN and you get
`ENABLING_FAILED`, this only means that the attempt to change the CMK failed. In
this case, encryption remains enabled with the old CMK.
If the encryption status of your delivery stream is `ENABLING_FAILED`, you can
invoke this operation again with a valid CMK. The CMK must be enabled and the
key policy mustn't explicitly deny the permission for Kinesis Data Firehose to
invoke KMS encrypt and decrypt operations.
You can enable SSE for a delivery stream only if it's a delivery stream that
uses `DirectPut` as its source.
The `StartDeliveryStreamEncryption` and `StopDeliveryStreamEncryption`
operations have a combined limit of 25 calls per delivery stream per 24 hours.
For example, you reach the limit if you call `StartDeliveryStreamEncryption` 13
times and `StopDeliveryStreamEncryption` 12 times for the same delivery stream
in a 24-hour period.
"""
def start_delivery_stream_encryption(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartDeliveryStreamEncryption", input, options)
end
@doc """
Disables server-side encryption (SSE) for the delivery stream.
This operation is asynchronous. It returns immediately. When you invoke it,
Kinesis Data Firehose first sets the encryption status of the stream to
`DISABLING`, and then to `DISABLED`. You can continue to read and write data to
your stream while its status is `DISABLING`. It can take up to 5 seconds after
the encryption status changes to `DISABLED` before all records written to the
delivery stream are no longer subject to encryption. To find out whether a
record or a batch of records was encrypted, check the response elements
`PutRecordOutput$Encrypted` and `PutRecordBatchOutput$Encrypted`, respectively.
To check the encryption state of a delivery stream, use
`DescribeDeliveryStream`.
If SSE is enabled using a customer managed CMK and then you invoke
`StopDeliveryStreamEncryption`, Kinesis Data Firehose schedules the related KMS
grant for retirement and then retires it after it ensures that it is finished
delivering records to the destination.
The `StartDeliveryStreamEncryption` and `StopDeliveryStreamEncryption`
operations have a combined limit of 25 calls per delivery stream per 24 hours.
For example, you reach the limit if you call `StartDeliveryStreamEncryption` 13
times and `StopDeliveryStreamEncryption` 12 times for the same delivery stream
in a 24-hour period.
"""
def stop_delivery_stream_encryption(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopDeliveryStreamEncryption", input, options)
end
@doc """
Adds or updates tags for the specified delivery stream.
A tag is a key-value pair that you can define and assign to AWS resources. If
you specify a tag that already exists, the tag value is replaced with the value
that you specify in the request. Tags are metadata. For example, you can add
friendly names and descriptions or other types of information that can help you
distinguish the delivery stream. For more information about tags, see [Using Cost Allocation
Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*.
Each delivery stream can have up to 50 tags.
This operation has a limit of five transactions per second per account.
"""
def tag_delivery_stream(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagDeliveryStream", input, options)
end
@doc """
Removes tags from the specified delivery stream.
Removed tags are deleted, and you can't recover them after this operation
successfully completes.
If you specify a tag that doesn't exist, the operation ignores it.
This operation has a limit of five transactions per second per account.
"""
def untag_delivery_stream(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagDeliveryStream", input, options)
end
@doc """
Updates the specified destination of the specified delivery stream.
Use this operation to change the destination type (for example, to replace the
Amazon S3 destination with Amazon Redshift) or change the parameters associated
with a destination (for example, to change the bucket name of the Amazon S3
destination). The update might not occur immediately. The target delivery stream
remains active while the configurations are updated, so data writes to the
delivery stream can continue during this process. The updated configurations are
usually effective within a few minutes.
Switching between Amazon ES and other services is not supported. For an Amazon
ES destination, you can only update to another Amazon ES destination.
If the destination type is the same, Kinesis Data Firehose merges the
configuration parameters specified with the destination configuration that
already exists on the delivery stream. If any of the parameters are not
specified in the call, the existing values are retained. For example, in the
Amazon S3 destination, if `EncryptionConfiguration` is not specified, then the
existing `EncryptionConfiguration` is maintained on the destination.
If the destination type is not the same, for example, changing the destination
from Amazon S3 to Amazon Redshift, Kinesis Data Firehose does not merge any
parameters. In this case, all parameters must be specified.
Kinesis Data Firehose uses `CurrentDeliveryStreamVersionId` to avoid race
conditions and conflicting merges. This is a required field, and the service
updates the configuration only if the existing configuration has a version ID
that matches. After the update is applied successfully, the version ID is
updated, and can be retrieved using `DescribeDeliveryStream`. Use the new
version ID to set `CurrentDeliveryStreamVersionId` in the next call.
"""
def update_destination(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDestination", input, options)
end
end
| 52.242857 | 141 | 0.772218 |
ff44da1a25fc48c65dc8320ffef083d3bad20752 | 1,822 | ex | Elixir | clients/cloud_search/lib/google_api/cloud_search/v1/model/repository_error.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/cloud_search/lib/google_api/cloud_search/v1/model/repository_error.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_search/lib/google_api/cloud_search/v1/model/repository_error.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudSearch.V1.Model.RepositoryError do
  @moduledoc """
  Errors raised while the connector communicates with the source repository.

  ## Attributes

  *   `errorMessage` (*type:* `String.t`, *default:* `nil`) - Message that describes the error. The maximum allowable length of the message is 8192 characters.
  *   `httpStatusCode` (*type:* `integer()`, *default:* `nil`) - Error codes. Matches the definition of HTTP status codes.
  *   `type` (*type:* `String.t`, *default:* `nil`) - Type of error.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :errorMessage => String.t(),
          :httpStatusCode => integer(),
          :type => String.t()
        }

  field(:errorMessage)
  field(:httpStatusCode)
  field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.CloudSearch.V1.Model.RepositoryError do
  # Delegate to the decoder generated on the model module by ModelBase.
  def decode(value, options),
    do: GoogleApi.CloudSearch.V1.Model.RepositoryError.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudSearch.V1.Model.RepositoryError do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 34.377358 | 159 | 0.720088 |
ff44e91a07117e14c3ddbbfe0e0be9e6ffdc994c | 1,210 | exs | Elixir | test/crawlie/page_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 91 | 2016-12-29T12:31:14.000Z | 2021-09-25T23:09:34.000Z | test/crawlie/page_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
] | 40 | 2016-12-14T00:55:52.000Z | 2022-01-29T08:46:03.000Z | test/crawlie/page_test.exs | kianmeng/crawlie | 19883f17a208107927ba14d15312f5a908d5e8ea | [
"MIT"
defmodule Crawlie.PageTest do
  use ExUnit.Case

  alias Crawlie.Page

  doctest Page

  @url "https://foo.bar.baz/abc/def?x=y&y=z"
  @uri URI.parse(@url)

  @uri_a URI.parse("aaa")
  @uri_b URI.parse("bbb")

  test "constructor" do
    base = %Page{uri: @uri, depth: 0, retries: 0}

    # Accepts both a %URI{} and a raw url string; depth defaults to 0.
    assert Page.new(@uri) == base
    assert Page.new(@url) == base
    assert Page.new(@uri, 7) == %{base | depth: 7}
  end

  test "Page structs compare by depth" do
    assert Page.new("foo", 0) < Page.new("foo", 10)
    assert Page.new("foo", 0) < Page.new("bar", 10)
    # Depth dominates the ordering even when retries differ.
    assert %Page{uri: @uri_a, depth: 0, retries: 10} < %Page{uri: @uri_b, depth: 10, retries: 0}
  end

  test "Page.retry/1" do
    page = %Page{uri: @uri_a, depth: 17, retries: 7}

    assert Page.retry(page) == %{page | retries: 8}
  end

  test "Page.child/2" do
    parent = %Page{uri: @uri_a, depth: 17, retries: 7}

    # A child is one level deeper and starts with a fresh retry count.
    assert Page.child(parent, @uri_a) == %{parent | depth: 18, retries: 0}
  end

  test "Page.url/1" do
    assert @url == @url |> Page.new() |> Page.url()
  end

  test "Page constructor strips off the hash fragment" do
    assert Page.new(@url <> "#anchor") == Page.new(@url)
  end
end
| 26.304348 | 96 | 0.61405 |
ff44f9eb16b261a86b12edf20a44e444c4b69e7f | 600 | ex | Elixir | apps/aecore/lib/aecore/peers/worker/peer_connection_supervisor.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 131 | 2018-03-10T01:35:56.000Z | 2021-12-27T13:44:41.000Z | apps/aecore/lib/aecore/peers/worker/peer_connection_supervisor.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 445 | 2018-03-12T09:46:17.000Z | 2018-12-12T09:52:07.000Z | apps/aecore/lib/aecore/peers/worker/peer_connection_supervisor.ex | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
defmodule Aecore.Peers.Worker.PeerConnectionSupervisor do
  @moduledoc """
  Supervises the individual peer connection GenServer processes.
  """

  use DynamicSupervisor

  alias Aecore.Peers.PeerConnection

  def start_link(_args) do
    DynamicSupervisor.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  # Connections are :temporary so a crashed peer connection is not restarted.
  def start_peer_connection(conn_info) do
    spec = Supervisor.child_spec({PeerConnection, conn_info}, restart: :temporary)
    DynamicSupervisor.start_child(__MODULE__, spec)
  end

  @impl true
  def init(:ok) do
    DynamicSupervisor.init(strategy: :one_for_one)
  end
end
| 21.428571 | 67 | 0.723333 |
ff45051f7dc10740ee5f96f4a7ee658ff9bc5ce2 | 556 | ex | Elixir | lib/shippo/client/hackney.ex | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | lib/shippo/client/hackney.ex | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | lib/shippo/client/hackney.ex | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
defmodule Shippo.Client.Hackney do
  @moduledoc """
  `hackney` client that implements the `Shippo.Client` behaviour.
  """

  @behaviour Shippo.Client

  @impl true
  def request(method, url, headers, body, opts \\ []) do
    method
    |> :hackney.request(url, headers, body, [:with_body | opts])
    |> to_response()
  end

  # Normalize hackney's reply shapes into the behaviour's response maps.
  defp to_response({:ok, status, _headers, body}), do: {:ok, %{status_code: status, body: body}}
  # Three-element replies carry no body; represent it as an empty JSON object.
  defp to_response({:ok, status, _headers}), do: {:ok, %{status_code: status, body: "{}"}}
  defp to_response({:error, reason}), do: {:error, %{reason: reason}}
end
| 25.272727 | 78 | 0.595324 |
ff45100611f8000f71684585b006aed3b9139b61 | 2,599 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/client_user.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/client_user.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/client_user.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser do
@moduledoc """
A client user is created under a client buyer and has restricted access to
the Marketplace and certain other sections of the Authorized Buyers UI based
on the role granted to the associated client buyer.
The only way a new client user can be created is via accepting an
email invitation
(see the
accounts.clients.invitations.create
method).
All fields are required unless otherwise specified.
## Attributes
* `clientAccountId` (*type:* `String.t`, *default:* `nil`) - Numerical account ID of the client buyer
with which the user is associated; the
buyer must be a client of the current sponsor buyer.
The value of this field is ignored in an update operation.
* `email` (*type:* `String.t`, *default:* `nil`) - User's email address. The value of this field
is ignored in an update operation.
* `status` (*type:* `String.t`, *default:* `nil`) - The status of the client user.
* `userId` (*type:* `String.t`, *default:* `nil`) - The unique numerical ID of the client user
that has accepted an invitation.
The value of this field is ignored in an update operation.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:clientAccountId => String.t(),
:email => String.t(),
:status => String.t(),
:userId => String.t()
}
field(:clientAccountId)
field(:email)
field(:status)
field(:userId)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.097222 | 105 | 0.716045 |
ff4513359c32188a794f8be7dd3dbad9df65b17a | 1,797 | ex | Elixir | lib/is/validators/list.ex | bydooweedoo/is | 2d6acf61b397f7297d42a83b09c1181e22cb5230 | [
"MIT"
] | 17 | 2018-06-23T11:16:17.000Z | 2021-11-17T18:28:37.000Z | lib/is/validators/list.ex | bydooweedoo/is | 2d6acf61b397f7297d42a83b09c1181e22cb5230 | [
"MIT"
] | null | null | null | lib/is/validators/list.ex | bydooweedoo/is | 2d6acf61b397f7297d42a83b09c1181e22cb5230 | [
"MIT"
] | 2 | 2018-10-24T20:12:54.000Z | 2019-12-25T17:45:41.000Z | defmodule Is.Validators.List do
@moduledoc ~S"""
Validation for list.
## Examples
iex> Is.validate([], :list)
[]
iex> Is.validate([], list: false)
[{:error, [], "must not be a list"}]
iex> Is.validate(["a", "b", "c"], list: :binary)
[]
iex> Is.validate(%{value: ["a", true, "c"]}, map: %{value: [list: [or: [:binary, :boolean]]]})
[]
iex> Is.validate(%{value: ["a", true, 1]}, map: %{value: [list: [or: [:binary, :boolean]]]})
[{:error, [:value, 2], "must satisfies at least one of conditions [:binary, :boolean]"}]
iex> Is.validate(["a", 1, 2], list: [binary: false])
[{:error, [0], "must not be a binary"}]
iex> Is.validate(["a", "b", :c], list: :binary)
[{:error, [2], "must be a binary"}]
iex> Is.validate([%{ok: true}, %{ok: true}, %{ok: false}], list: [map: %{ok: [equals: true]}])
[{:error, [2, :ok], "must equals true"}]
iex> Is.validate("test", unknown: true)
{:error, "Validator :unknown does not exist"}
"""
def validate(data, is) when is_boolean(is) do
case is_list(data) === is do
true -> :ok
false when is === true -> {:error, "must be a list"}
false when is === false -> {:error, "must not be a list"}
end
end
def validate(values, schema) when is_list(values) do
Enum.reduce(values, {0, []}, fn(value, {index, acc}) ->
{index + 1, acc ++ Is.validate(value, schema, [index], true)}
end)
|> elem(1)
end
def validate(_values, _schema) do
{:error, "must be a list"}
end
end
# defimpl Is.AliasType, for: List do
# def get([]) do
# {:ok, :list}
# end
# def get([options]) do
# {:ok, :list, options}
# end
# def get(options) do
# {:ok, :list, [{:or, options}]}
# end
# end
| 26.426471 | 100 | 0.531441 |
ff4522fe7815d91f4c5f25ca8dcd76eccf7c8257 | 2,342 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.Permission do
@moduledoc """
A Permissions resource represents some extra capability, to be granted to an Android app, which requires explicit consent. An enterprise admin must consent to these permissions on behalf of their users before an entitlement for the app can be created.
The permissions collection is read-only. The information provided for each permission (localized name and description) is intended to be used in the MDM user interface when obtaining consent from the enterprise.
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - A longer description of the Permissions resource, giving more details of what it affects.
* `kind` (*type:* `String.t`, *default:* `androidenterprise#permission`) -
* `name` (*type:* `String.t`, *default:* `nil`) - The name of the permission.
* `permissionId` (*type:* `String.t`, *default:* `nil`) - An opaque string uniquely identifying the permission.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t(),
:kind => String.t(),
:name => String.t(),
:permissionId => String.t()
}
field(:description)
field(:kind)
field(:name)
field(:permissionId)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.Permission do
  # Delegate to the decoder generated on the model module by ModelBase.
  def decode(value, options),
    do: GoogleApi.AdExchangeBuyer.V2beta1.Model.Permission.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.Permission do
  # All generated models share the ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.37931 | 253 | 0.72801 |
ff455417cccd72208faf868173353567ab07aaa7 | 608 | ex | Elixir | lib/exredis/connection_string.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
] | null | null | null | lib/exredis/connection_string.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
] | null | null | null | lib/exredis/connection_string.ex | aforward-oss/exredis | cf9e72e971231eaf1e99c422bd84bbb40215c84b | [
"MIT"
defmodule Exredis.ConnectionString do
  @moduledoc """
  Parses a Redis connection URL (e.g. `redis://:password@host:6379/0`) into
  an `Exredis.ConnectionString.Config` struct.
  """

  defmodule Config do
    # host/port come straight from the URI; password falls back to "" and the
    # database index to 0 when absent from the URL.
    defstruct host: nil, port: nil, password: nil, db: nil
  end

  @doc """
  Parses `connection_string` into a `Config` struct.

  The password is the part of the userinfo after the first `:` (or `""` when
  there is none), and the db is the first path segment parsed as an integer
  (or `0` when the URL has no usable path).
  """
  def parse(connection_string) do
    uri = URI.parse(connection_string)

    %Config{
      host: uri.host,
      port: uri.port,
      password: parse_password(uri.userinfo),
      db: parse_db(uri.path)
    }
  end

  # No path at all means db 0.
  defp parse_db(nil), do: 0

  # An empty first segment (e.g. "redis://host/") also means db 0 instead of
  # crashing in String.to_integer/1 as the previous version did.
  defp parse_db(path) do
    case String.split(path, "/") do
      [_, db | _] when db != "" -> String.to_integer(db)
      _ -> 0
    end
  end

  # No userinfo means no password.
  defp parse_password(nil), do: ""

  # Userinfo without a ":" (no password part) now yields "" instead of nil,
  # matching the nil-userinfo clause above.
  defp parse_password(userinfo) do
    case String.split(userinfo, ":") do
      [_user, password | _] -> password
      _ -> ""
    end
  end
end
| 22.518519 | 64 | 0.626645 |
ff45552e52b071081d3eef4c157c02e9591b7722 | 4,270 | exs | Elixir | test/sampler_test.exs | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | test/sampler_test.exs | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | 1 | 2019-02-08T01:08:31.000Z | 2019-02-20T00:24:51.000Z | test/sampler_test.exs | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | defmodule SamplerTest do
use ExUnit.Case
alias NewRelic.Harvest.Collector
defmodule TestProcess do
use GenServer
def start_link, do: GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
def init(:ok) do
NewRelic.sample_process()
{:ok, %{}}
end
def handle_call(:work, _from, state) do
{:reply, fib(10), state}
end
def fib(0), do: 0
def fib(1), do: 1
def fib(n), do: fib(n - 1) + fib(n - 2)
end
  # End-to-end check of the BEAM VM sampler: reset both harvest cycles, force
  # a sampler report, then assert on the harvested custom events and metrics.
  test "Beam stats Sampler" do
    TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
    TestHelper.restart_harvest_cycle(Collector.Metric.HarvestCycle)
    TestHelper.trigger_report(NewRelic.Sampler.Beam)
    events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
    metrics = TestHelper.gather_harvest(Collector.Metric.Harvester)
    # A BeamStat event with non-zero reductions and process count must exist.
    assert Enum.find(events, fn [_, event, _] ->
             event[:category] == :BeamStat && event[:reductions] > 0 && event[:process_count] > 0
           end)
    # Physical memory metric value (bound to `mb`) must fall in a plausible
    # range — presumably megabytes; confirm against the sampler's units.
    [%{name: "Memory/Physical"}, [_, mb, _, _, _, _]] =
      TestHelper.find_metric(metrics, "Memory/Physical")
    assert 5 < mb
    assert mb < 100
    assert [%{name: "CPU/User Time"}, [_, cpu, _, _, _, _]] =
             TestHelper.find_metric(metrics, "CPU/User Time")
    assert cpu > 0
  end
  # TestProcess registers itself for sampling in init/1; after a report it
  # should surface as a ProcessSample event with an empty message queue.
  test "Process Sampler" do
    TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
    TestProcess.start_link()
    TestHelper.trigger_report(NewRelic.Sampler.Process)
    events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
    assert Enum.find(events, fn [_, event, _] ->
             event[:category] == :ProcessSample && event[:name] == "SamplerTest.TestProcess" &&
               event[:message_queue_length] == 0
           end)
  end
  # An anonymous (unregistered) process is sampled under a "PID..."-style
  # name rather than a module name.
  test "unnamed Process Sampler" do
    TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
    parent = self()
    spawn(fn ->
      NewRelic.sample_process()
      TestHelper.trigger_report(NewRelic.Sampler.Process)
      send(parent, :continue)
    end)
    # Wait for the spawned process to finish reporting before harvesting.
    assert_receive :continue, 500
    events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
    assert Enum.find(events, fn [_, event, _] ->
             event[:category] == :ProcessSample && event[:name] =~ "PID" &&
               event[:message_queue_length] == 0
           end)
  end
  # Sample twice with a :work call in between; the reported reduction count
  # for the process should grow after it does work.
  test "Process Sampler - count work between samplings" do
    TestProcess.start_link()
    TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
    TestHelper.trigger_report(NewRelic.Sampler.Process)
    events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
    # Baseline reductions before any extra work.
    [_, %{reductions: first_reductions}, _] =
      Enum.find(events, fn [_, event, _] ->
        event[:category] == :ProcessSample && event[:name] == "SamplerTest.TestProcess"
      end)
    TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
    # :work makes TestProcess compute fib(10), burning reductions.
    GenServer.call(TestProcess, :work)
    TestHelper.trigger_report(NewRelic.Sampler.Process)
    events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
    [_, %{reductions: second_reductions}, _] =
      Enum.find(events, fn [_, event, _] ->
        event[:category] == :ProcessSample && event[:name] == "SamplerTest.TestProcess"
      end)
    assert second_reductions > first_reductions
  end
  describe "Sampler.ETS" do
    # A populated named table should produce an EtsStat event carrying the
    # table's name and row count. (510 rows — presumably above the sampler's
    # size threshold; confirm against the sampler configuration.)
    test "records metrics on ETS tables" do
      TestHelper.restart_harvest_cycle(Collector.CustomEvent.HarvestCycle)
      :ets.new(:test_table, [:named_table])
      for n <- 1..510, do: :ets.insert(:test_table, {n, "BAR"})
      TestHelper.trigger_report(NewRelic.Sampler.Ets)
      events = TestHelper.gather_harvest(Collector.CustomEvent.Harvester)
      assert Enum.find(events, fn [_, event, _] ->
               event[:category] == :EtsStat && event[:table_name] == ":test_table" &&
                 event[:size] == 510
             end)
    end
    # Sampling a table that doesn't exist must be a no-op, not a crash.
    test "record_sample/1 ignores non-existent tables" do
      assert NewRelic.Sampler.Ets.record_sample(:nope_not_here) == :ignore
    end
  end
test "detect the processes which are top consumers" do
top_procs = NewRelic.Sampler.TopProcess.detect_top_processes()
assert length(top_procs) >= 5
assert length(top_procs) <= 10
end
end
| 30.5 | 97 | 0.67096 |
ff4555eebba53ac66a2c6e8b1f389159874b8433 | 2,074 | exs | Elixir | test/media_server_web/live/watch_movie_live_test.exs | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
] | 538 | 2022-02-02T21:46:52.000Z | 2022-03-29T20:50:34.000Z | test/media_server_web/live/watch_movie_live_test.exs | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
] | 48 | 2022-02-03T11:46:09.000Z | 2022-03-31T04:44:53.000Z | test/media_server_web/live/watch_movie_live_test.exs | midarrlabs/midarr-server | f12c6347e41a96517bbb5ed1ad12b65d10b8d30a | [
"MIT"
defmodule MediaServerWeb.WatchMovieLiveTest do
  use MediaServerWeb.ConnCase

  import Phoenix.LiveViewTest

  alias MediaServer.AccountsFixtures
  alias MediaServer.MoviesFixtures
  alias MediaServer.ContinuesFixtures
  alias MediaServer.ComponentsFixtures
  alias MediaServer.ActionsFixtures

  defp create_fixtures(_) do
    ComponentsFixtures.action_fixture()
    %{user: AccountsFixtures.user_fixture()}
  end

  # Logs the fixture user in through the session controller so subsequent
  # LiveView requests carry an authenticated session. Shared by every test
  # below (previously duplicated verbatim in each one).
  defp sign_in(conn, user) do
    post(conn, Routes.user_session_path(conn, :create), %{
      "user" => %{"email" => user.email, "password" => AccountsFixtures.valid_user_password()}
    })
  end

  # Mounts the watch-movie LiveView for the given movie and returns the view.
  defp open_player(conn, movie) do
    {:ok, view, _html} = live(conn, Routes.watch_movie_show_path(conn, :show, movie["id"]))
    view
  end

  describe "Show page" do
    setup [:create_fixtures]

    test "it can watch", %{conn: conn, user: user} do
      view =
        conn
        |> sign_in(user)
        |> open_player(MoviesFixtures.get_movie())

      render_hook(view, :movie_played)

      assert Enum.count(ActionsFixtures.get_movie_played()) === 1
    end

    test "it has continue", %{conn: conn, user: user} do
      view =
        conn
        |> sign_in(user)
        |> open_player(MoviesFixtures.get_movie())

      # 89/100 watched — below the boundary these two tests pin, so a
      # continue record is saved.
      render_hook(view, :movie_destroyed, %{
        current_time: 89,
        duration: 100
      })

      assert ContinuesFixtures.get_movie_continue()
    end

    test "it does not have continue", %{conn: conn, user: user} do
      view =
        conn
        |> sign_in(user)
        |> open_player(MoviesFixtures.get_movie())

      # 90/100 watched — at the boundary, so no continue record is kept.
      render_hook(view, :movie_destroyed, %{
        current_time: 90,
        duration: 100
      })

      refute ContinuesFixtures.get_movie_continue()
    end
  end
end
| 28.805556 | 98 | 0.653809 |
ff456ef78f664469eb5936742d4badb3fa29d2de | 1,033 | ex | Elixir | test/support/channel_case.ex | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | tsara27/collab-x-phoenix | 828f8fbdcf853a43e096a42dc2f003cf443eb792 | [
"MIT"
defmodule CollabXPhoenix.ChannelCase do
  @moduledoc """
  Test case template for channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also import other
  functionality to make it easier to build and query models.

  Tests that interact with the database cannot be async: every test runs
  inside a transaction which is reset at the beginning of the test unless
  the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      alias CollabXPhoenix.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query

      # The default endpoint for testing
      @endpoint CollabXPhoenix.Endpoint
    end
  end

  setup tags do
    repo = CollabXPhoenix.Repo

    :ok = Ecto.Adapters.SQL.Sandbox.checkout(repo)

    unless tags[:async] do
      # Shared mode lets collaborating processes use the test's connection.
      Ecto.Adapters.SQL.Sandbox.mode(repo, {:shared, self()})
    end

    :ok
  end
end
| 23.477273 | 76 | 0.710552 |
ff4576d8be12e5225e12d9858cfcc568fe276e04 | 183 | ex | Elixir | lib/datastore/dts/key.ex | sdipendra/ecto-gcd | 218485f251ec3bb372854fd2ed61646710ef2eb6 | [
"Apache-2.0"
] | 1 | 2021-05-04T17:57:37.000Z | 2021-05-04T17:57:37.000Z | lib/datastore/dts/key.ex | sdipendra/ecto-gcd | 218485f251ec3bb372854fd2ed61646710ef2eb6 | [
"Apache-2.0"
] | null | null | null | lib/datastore/dts/key.ex | sdipendra/ecto-gcd | 218485f251ec3bb372854fd2ed61646710ef2eb6 | [
"Apache-2.0"
defmodule Datastore.Dts.Key do
  @moduledoc false

  # JSON-encode only the Datastore key fields; both must be set at build time.
  @derive {Jason.Encoder, only: [:partitionId, :path]}
  @enforce_keys [:partitionId, :path]
  defstruct partitionId: nil, path: nil
end
| 20.333333 | 54 | 0.715847 |
ff45a141aec8b93003521ca856c2e02d0eb70c5d | 2,568 | exs | Elixir | 24/part1.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | [
"MIT"
] | 3 | 2016-01-18T01:14:45.000Z | 2017-05-11T09:14:49.000Z | 24/part1.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | [
"MIT"
] | null | null | null | 24/part1.exs | seantanly/elixir-advent_of_code | 1e39ac46bc01f5c8cffd2d2f79f9af0b71767291 | [
"MIT"
defmodule Combination do
  @moduledoc """
  Generates k-element combinations of a collection, pruning branches that
  cannot possibly yield enough remaining elements.
  """

  @doc """
  Returns every `k`-element combination of `collection`.

  Raises `Enum.OutOfBoundsError` when `k` exceeds the collection size.
  """
  def combine(collection, k) when is_integer(k) and k >= 0 do
    items = Enum.to_list(collection)
    count = length(items)

    if k > count do
      raise Enum.OutOfBoundsError
    else
      do_combine(items, count, k, [], [])
    end
  end

  # Base cases: choosing 0 yields the single empty pick; choosing 1 yields
  # each element on its own (top-level fast path — recursion never re-enters
  # with k == 1 since picks are tracked by the accumulator length instead).
  defp do_combine(_items, _count, 0, _chosen, _results), do: [[]]
  defp do_combine(items, _count, 1, _chosen, _results), do: Enum.map(items, &[&1])

  defp do_combine(items, _count, k, chosen, results) do
    items
    |> element_suffixes()
    |> Enum.reduce(results, fn {item, suffix}, acc ->
      suffix_len = length(suffix)
      chosen_len = length(chosen)

      if k > chosen_len + 1 + suffix_len do
        # Not enough elements left on this branch to reach k picks.
        acc
      else
        picked = [item | chosen]

        if chosen_len + 1 == k do
          [picked | acc]
        else
          do_combine(suffix, suffix_len, k, picked, acc)
        end
      end
    end)
  end

  # Pairs each element with the elements that follow it, preserving order:
  # [1, 2, 3] -> [{1, [2, 3]}, {2, [3]}, {3, []}].
  defp element_suffixes([]), do: []
  defp element_suffixes([head | tail]), do: [{head, tail} | element_suffixes(tail)]
end
# Calculating the exact configurations of all groups of packages isn't the goal of the question.
# To save computation time, we can compute for only the first group.
# The observation is, if the entire packages collection's weight can be equally split into N groups,
# there won't exist a combination whereby a group is formed, which causes the remaining elements to unable to form
# into equal groups as well.
defmodule M do
def qe(pkg), do: Enum.reduce(pkg, 1, &(&1 * &2))
  # Finds the smallest (then lowest-QE) first group of packages whose weight is
  # exactly 1/group_count of the total weight. Tries group sizes from 1 upward
  # and halts on the first size that yields any valid group.
  def find_first_group(packages, group_count) do
    # Target weight each group must sum to.
    group_weight = Enum.sum(packages) |> div(group_count)
    # Largest group size worth trying: an equal split cannot need more.
    max_grp_length = Enum.count(packages) |> div(group_count)
    Enum.reduce_while(1..max_grp_length, nil, fn i, acc ->
      Combination.combine(packages, i)
      |> Enum.filter(&(Enum.sum(&1) == group_weight))
      # NOTE(review): `acc` can only ever be nil when this runs (it is never
      # updated on a :cont step), so this reject looks like a no-op — confirm.
      |> Enum.reject(&(acc && qe(acc) < qe(&1)))
      |> Enum.sort_by(&qe/1)
      |> Enum.at(0)
      |> case do
        # No group of size i sums to the target: try the next size.
        nil -> {:cont, acc}
        # Lowest-QE group of the smallest viable size: done.
        p1 -> {:halt, [p1]}
      end
    end)
  end
end
# Sanity check against the puzzle's example input (1..5 and 7..11, three
# groups): the best first group's QE must be 99.
result =
  Enum.to_list(1..5) ++ Enum.to_list(7..11)
  |> M.find_first_group(3)
  |> IO.inspect
  |> Enum.at(0)
  |> M.qe
  |> IO.inspect
^result = 99
# Solve the real puzzle input (one integer weight per line) and assert the
# known answer, so a regression fails loudly via the pin match.
result =
  Path.join(__DIR__, "input.txt")
  |> File.read!
  |> String.split("\n", trim: true)
  |> Enum.map(&String.to_integer/1)
  |> M.find_first_group(3)
  |> IO.inspect
  |> Enum.at(0)
  |> M.qe
  |> IO.inspect
^result = 11266889531
| 30.571429 | 114 | 0.63824 |
ff45bbfcdeacf1a05b5f2883cf7cac3376959300 | 844 | ex | Elixir | lib/sue/commands/images.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | 1 | 2020-06-21T01:50:12.000Z | 2020-06-21T01:50:12.000Z | lib/sue/commands/images.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | null | null | null | lib/sue/commands/images.ex | alwayswimmin/Sue | 33dfd860e7d5b6dce11e2dc202924efad6a9474c | [
"MIT"
] | null | null | null | defmodule Sue.Commands.Images do
  # Persist the :is_persisted attribute into the compiled module so it can be
  # read back at runtime — presumably used by Sue's command discovery; confirm.
  Module.register_attribute(__MODULE__, :is_persisted, persist: true)
  @is_persisted "is persisted"
  alias Sue.Models.Attachment
  # Root directory for bundled images, resolved from the app's priv dir.
  @media_path Path.join(:code.priv_dir(:sue), "media/")
  @doc """
  Shows a picture of a cute doog.
  Usage: !doog
  """
  def c_doog(_msg) do
    %Attachment{filename: Path.join(@media_path, "korone.JPG")}
  end
  @doc """
  Snap!
  """
  def c_cringe(_msg), do: random_image_from_dir("cringe/")
  @doc """
  Sends a cute photo.
  """
  def c_qt(_msg), do: random_image_from_dir("qt/")
@spec random_image_from_dir(String.t()) :: Attachment.t()
defp random_image_from_dir(dir) do
path = Path.join(@media_path, dir)
path
|> File.ls!()
|> Enum.random()
|> (fn image ->
%Attachment{filename: Path.join(path, image)}
end).()
end
end
| 22.210526 | 69 | 0.643365 |
ff45c02d1befe960b7903f2a8cfb63821537d1c6 | 580 | ex | Elixir | lib/cforum/messages/message_tag.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/messages/message_tag.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/messages/message_tag.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | defmodule Cforum.Messages.Tags.MessageTag do
  use CforumWeb, :model
  # Join table between messages and tags; primary key is message_tag_id and it
  # is also what Phoenix uses when building URLs for this record.
  @primary_key {:message_tag_id, :id, autogenerate: true}
  @derive {Phoenix.Param, key: :message_tag_id}
  schema "messages_tags" do
    belongs_to(:message, Cforum.Messages.Message, references: :message_id)
    belongs_to(:tag, Cforum.Messages.Tag, references: :tag_id)
  end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:message_id, :tag_id])
|> validate_required([:message_id, :tag_id])
end
end
| 27.619048 | 74 | 0.694828 |
ff45cca871ffe0e3400ba0afc3ff3a19b1706d1f | 4,318 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/performance_report.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/performance_report.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/performance_report.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Api.PerformanceReport do
@moduledoc """
API calls for all endpoints tagged `PerformanceReport`.
"""
alias GoogleApi.AdExchangeBuyer.V14.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves the authenticated user's list of performance metrics.
## Parameters
* `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
* `account_id` (*type:* `String.t`) - The account id to get the reports.
* `end_date_time` (*type:* `String.t`) - The end time of the report in ISO 8601 timestamp format using UTC.
* `start_date_time` (*type:* `String.t`) - The start time of the report in ISO 8601 timestamp format using UTC.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:maxResults` (*type:* `integer()`) - Maximum number of entries returned on one result page. If not set, the default is 100. Optional.
* `:pageToken` (*type:* `String.t`) - A continuation token, used to page through performance reports. To retrieve the next page, set this parameter to the value of "nextPageToken" from the previous response. Optional.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.PerformanceReportList{}}` on success
* `{:error, info}` on failure
"""
@spec adexchangebuyer_performance_report_list(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.AdExchangeBuyer.V14.Model.PerformanceReportList.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
  # NOTE: generated code (see file header) — documented here for readers, but
  # changes should be made in the generator, not by hand.
  def adexchangebuyer_performance_report_list(
        connection,
        account_id,
        end_date_time,
        start_date_time,
        optional_params \\ [],
        opts \\ []
      ) do
    # Whitelist of optional params and where each is placed in the request.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query
    }
    # Required params are always added as query parameters; optional ones are
    # merged per the config above.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/performancereport", %{})
      |> Request.add_param(:query, :accountId, account_id)
      |> Request.add_param(:query, :endDateTime, end_date_time)
      |> Request.add_param(:query, :startDateTime, start_date_time)
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    # Execute and decode the JSON body into the PerformanceReportList model.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.PerformanceReportList{}]
    )
  end
end
| 42.333333 | 225 | 0.660028 |
ff4643ef804a4eb4ad7699827c4d0c0652f19277 | 20,349 | exs | Elixir | test/case_test.exs | X4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 58 | 2016-05-14T03:56:22.000Z | 2019-04-18T14:45:27.000Z | test/case_test.exs | X4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 5 | 2016-08-30T19:47:33.000Z | 2017-02-04T22:50:40.000Z | test/case_test.exs | x4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 2 | 2017-02-03T15:39:23.000Z | 2018-11-21T15:30:59.000Z | defmodule DiscUnionTest.Case do
use ExUnit.Case, async: true
test "discriminated union's `case` macro should riase when condition is not evaluated to this discriminated union" do
assert_raise BadStructError, "expected a struct named ExampleDU, got: nil", fn ->
use ExampleDU
ExampleDU.case nil do
Asd -> :asd
Qwe in _ -> :qwe
Rty in _, _ -> :rty
end
end
assert_raise BadStructError, "expected a struct named ExampleDUa, got: nil", fn ->
use ExampleDUa
ExampleDUa.case nil do
:asd -> :asd
:qwe in _ -> :qwe
:rty in _, _ -> :rty
end
end
end
test "discriminated union's `case` macro accepts the `in` format for case arguments" do
use ExampleDU
x = ExampleDU.from Asd
res = ExampleDU.case x do
Asd -> :asd
Qwe in _ -> :qwe
Rty in _, _ -> :rty
end
assert res == :asd
x = ExampleDU.from {Qwe, 1}
res = ExampleDU.case x do
Asd -> :asd
Qwe in _ -> :qwe
Rty in _, _ -> :rty
end
assert res == :qwe
x = ExampleDU.from {Rty, 1, 1}
res = ExampleDU.case x do
Asd -> :asd
Qwe in _ -> :qwe
Rty in _, _ -> :rty
end
assert res == :rty
# tests for pure atoms
use ExampleDUa
x = ExampleDUa.from :asd
res = ExampleDUa.case x do
:asd -> :asd
:qwe in _ -> :qwe
:rty in _, _ -> :rty
end
assert res == :asd
x = ExampleDUa.from {:qwe, 1}
res = ExampleDUa.case x do
:asd -> :asd
:qwe in _ -> :qwe
:rty in _, _ -> :rty
end
assert res == :qwe
x = ExampleDUa.from {:rty, 1, 1}
res = ExampleDUa.case x do
:asd -> :asd
:qwe in _ -> :qwe
:rty in _, _ -> :rty
end
assert res == :rty
end
test "discriminated union's `case` macro accepts the tuple format for case arguments" do
use ExampleDU
x = ExampleDU.from Asd
res = ExampleDU.case x do
Asd -> :asd
{Qwe, _} -> :qwe
{Rty, _, _} -> :rty
end
assert res == :asd
x = ExampleDU.from {Qwe, 1}
res = ExampleDU.case x do
Asd -> :asd
{Qwe, _} -> :qwe
{Rty, _, _} -> :rty
end
assert res == :qwe
x = ExampleDU.from {Rty, 1, 1}
res = ExampleDU.case x do
Asd -> :asd
{Qwe, _} -> :qwe
{Rty, _, _} -> :rty
end
assert res == :rty
# tests for pure atoms
use ExampleDUa
x = ExampleDUa.from :asd
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, _} -> :qwe
{:rty, _, _} -> :rty
end
assert res == :asd
x = ExampleDUa.from {:qwe, 1}
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, _} -> :qwe
{:rty, _, _} -> :rty
end
assert res == :qwe
x = ExampleDUa.from {:rty, 1, 1}
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, _} -> :qwe
{:rty, _, _} -> :rty
end
assert res == :rty
end
test "discriminated union's `case` macro cases can have a pattern match for whole case expression in the `in` format for case arguments" do
use ExampleDU
c = Asd
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in _ -> z
z = Rty in _, _ -> z
end
assert res == c
c = {Qwe, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in _ -> z
z = Rty in _, _ -> z
end
assert res == c
c = {Rty, 1, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in _ -> z
z = Rty in _, _ -> z
end
assert res == c
# tests for pure atoms
use ExampleDUa
c = :asd
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in _ -> z
z = :rty in _, _ -> z
end
assert res == c
c = {:qwe, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in _ -> z
z = :rty in _, _ -> z
end
assert res == c
c = {:rty, 1, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in _ -> z
z = :rty in _, _ -> z
end
assert res == c
end
test "discriminated union's `case` macro cases can have a pattern match for whole case expression in the tuple format for case arguments" do
use ExampleDU
c = Asd
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, _} -> z
z = {Rty, _, _} -> z
end
assert res == c
c = {Qwe, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, _} -> z
z = {Rty, _, _} -> z
end
assert res == c
c = {Rty, 1, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, _} -> z
z = {Rty, _, _} -> z
end
assert res == c
# tests for pure atoms
use ExampleDUa
c = :asd
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, _} -> z
z = {:rty, _, _} -> z
end
assert res == c
c = {:qwe, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, _} -> z
z = {:rty, _, _} -> z
end
assert res == c
c = {:rty, 1, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, _} -> z
z = {:rty, _, _} -> z
end
assert res == c
end
test "discriminated union's `case` macro accepts the `in` format for case arguments with guard present" do
use ExampleDU
x = ExampleDU.from Asd
res = ExampleDU.case x do
Asd -> :asd
Qwe in x when x > 0 -> :qwe
Rty in x, _ when x > 0 -> :rty
end
assert res == :asd
x = ExampleDU.from {Qwe, 1}
res = ExampleDU.case x do
Asd -> :asd
Qwe in x when x > 0 -> :qwe
Rty in x, _ when x > 0 -> :rty
end
assert res == :qwe
x = ExampleDU.from {Rty, 1, 1}
res = ExampleDU.case x do
Asd -> :asd
Qwe in x when x > 0 -> :qwe
Rty in x, _ when x > 0 -> :rty
end
assert res == :rty
# tests for pure atoms
use ExampleDUa
x = ExampleDUa.from :asd
res = ExampleDUa.case x do
:asd -> :asd
:qwe in x when x > 0 -> :qwe
:rty in x, _ when x > 0 -> :rty
end
assert res == :asd
x = ExampleDUa.from {:qwe, 1}
res = ExampleDUa.case x do
:asd -> :asd
:qwe in x when x > 0 -> :qwe
:rty in x, _ when x > 0 -> :rty
end
assert res == :qwe
x = ExampleDUa.from {:rty, 1, 1}
res = ExampleDUa.case x do
:asd -> :asd
:qwe in x when x > 0 -> :qwe
:rty in x, _ when x > 0 -> :rty
end
assert res == :rty
end
test "discriminated union's `case` macro accepts the tuple format for case arguments with guard present" do
use ExampleDU
x = ExampleDU.from Asd
res = ExampleDU.case x do
Asd -> :asd
{Qwe, x} when x > 0 -> :qwe
{Rty, x, _} when x > 0 -> :rty
end
assert res == :asd
x = ExampleDU.from {Qwe, 1}
res = ExampleDU.case x do
Asd -> :asd
{Qwe, x} when x > 0 -> :qwe
{Rty, x, _} when x > 0 -> :rty
end
assert res == :qwe
x = ExampleDU.from {Rty, 1, 1}
res = ExampleDU.case x do
Asd -> :asd
{Qwe, x} when x > 0 -> :qwe
{Rty, x, _} when x > 0 -> :rty
end
assert res == :rty
# tests for pure atoms
use ExampleDUa
x = ExampleDUa.from :asd
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, x} when x > 0 -> :qwe
{:rty, x, _} when x > 0 -> :rty
end
assert res == :asd
x = ExampleDUa.from {:qwe, 1}
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, x} when x > 0 -> :qwe
{:rty, x, _} when x > 0 -> :rty
end
assert res == :qwe
x = ExampleDUa.from {:rty, 1, 1}
res = ExampleDUa.case x do
:asd -> :asd
{:qwe, x} when x > 0 -> :qwe
{:rty, x, _} when x > 0 -> :rty
end
assert res == :rty
end
test "discriminated union's `case` macro cases can have a pattern match for whole case expression in the `in` format for case arguments with guard present" do
use ExampleDU
c = Asd
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in x when x > 0 -> z
z = Rty in x, _ when x > 0 -> z
end
assert res == c
c = {Qwe, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in x when x > 0 -> z
z = Rty in x, _ when x > 0 -> z
end
assert res == c
c = {Rty, 1, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = Qwe in x when x > 0 -> z
z = Rty in x, _ when x > 0 -> z
end
assert res == c
# tests for pure atoms
use ExampleDUa
c = :asd
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in x when x > 0 -> z
z = :rty in x, _ when x > 0 -> z
end
assert res == c
c = {:qwe, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in x when x > 0 -> z
z = :rty in x, _ when x > 0 -> z
end
assert res == c
c = {:rty, 1, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = :qwe in x when x > 0 -> z
z = :rty in x, _ when x > 0 -> z
end
assert res == c
end
test "discriminated union's `case` macro cases can have a pattern match for whole case expression in the tuple format for case arguments with guard present" do
use ExampleDU
c = Asd
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, x} when x > 0 -> z
z = {Rty, x, _} when x > 0 -> z
end
assert res == c
c = {Qwe, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, x} when x > 0 -> z
z = {Rty, x, _}when x > 0 -> z
end
assert res == c
c = {Rty, 1, 1}
x = ExampleDU.from! c
res = ExampleDU.case x do
z = Asd -> z
z = {Qwe, x} when x > 0 -> z
z = {Rty, x, _} when x > 0 -> z
end
assert res == c
# tests for pure atoms
use ExampleDUa
c = :asd
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, x} when x > 0 -> z
z = {:rty, x, _} when x > 0 -> z
end
assert res == c
c = {:qwe, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, x} when x > 0 -> z
z = {:rty, x, _}when x > 0 -> z
end
assert res == c
c = {:rty, 1, 1}
x = ExampleDUa.from! c
res = ExampleDUa.case x do
z = :asd -> z
z = {:qwe, x} when x > 0 -> z
z = {:rty, x, _} when x > 0 -> z
end
assert res == c
end
test "discriminated union's `case` macro should riase on unknow tags and cases" do
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Qqq
ExampleDU.case x do
Qwe -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :qqq
ExampleDUa.case x do
:qwe -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Qqq
ExampleDU.case x do
Wat -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :qqq
ExampleDUa.case x do
:wat -> :ok
end
end)
end
end
test "discriminated union's `case` macro should riase on unknow tags and cases even whet `allow_underscore` is true" do
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Qqq
ExampleDU.case x, allow_underscore: true do
Qwe -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :qqq
ExampleDUa.case x, allow_underscore: true do
:qwe -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Qqq
ExampleDU.case x, allow_underscore: true do
Wat -> :ok
end
end)
end
assert_raise UndefinedUnionCaseError, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :qqq
ExampleDUa.case x, allow_underscore: true do
:wat -> :ok
end
end)
end
end
test "discriminated union's `case` macro should riase when not all cases are exhausted" do
testdu_msg = "not all defined union cases are used, should be all of: Asd, Qwe in \"any\", Rty in \"integer\" * \"atom\""
testdua_msg = "not all defined union cases are used, should be all of: :asd, :qwe in \"any\", :rty in \"integer\" * \"atom\""
assert_raise MissingUnionCaseError, testdu_msg, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Asd
ExampleDU.case x do
Asd -> :asd
Qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdu_msg, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Asd
ExampleDU.case x do
Asd -> :asd
Qwe in 1 -> :qwe
Qwe in x when x > 1 -> :qwe
Qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdua_msg, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :asd
ExampleDUa.case x do
:asd -> :asd
:qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdua_msg, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :asd
ExampleDUa.case x do
:asd -> :asd
:qwe in 1 -> :qwe
:qwe in x when x > 2 -> :qwe
:qwe in _ -> :qwe
end
end)
end
end
test "discriminated union's `case` macro should riase when not all cases are exhausted unless `allow_underscore` is set to true" do
testdu_msg = "not all defined union cases are used, should be at least a catch all statement (_) and any combination of: Asd, Qwe in \"any\", Rty in \"integer\" * \"atom\""
testdua_msg = "not all defined union cases are used, should be at least a catch all statement (_) and any combination of: :asd, :qwe in \"any\", :rty in \"integer\" * \"atom\""
assert_raise MissingUnionCaseError, testdu_msg, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Asd
ExampleDU.case x, allow_underscore: true do
Asd -> :asd
Qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdu_msg, fn ->
Code.eval_quoted(quote do
use ExampleDU
x = struct ExampleDU, case: Asd
ExampleDU.case x, allow_underscore: true do
Asd -> :asd
Qwe in 1 -> :qwe
Qwe in x when x > 1 -> :qwe
Qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdua_msg, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :asd
ExampleDUa.case x, allow_underscore: true do
:asd -> :asd
:qwe in _ -> :qwe
end
end)
end
assert_raise MissingUnionCaseError, testdua_msg, fn ->
Code.eval_quoted(quote do
use ExampleDUa
x = struct ExampleDUa, case: :asd
ExampleDUa.case x, allow_underscore: true do
:asd -> :asd
:qwe in 1 -> :qwe
:qwe in x when x > 2 -> :qwe
:qwe in _ -> :qwe
end
end)
end
end
end
| 30.371642 | 180 | 0.421544 |
ff464f06d19539b7cd67fbca0b8bd25e1d2b02d4 | 5,875 | ex | Elixir | lib/mongooseice/udp/worker.ex | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | 90 | 2017-09-26T12:20:06.000Z | 2022-01-30T17:58:11.000Z | lib/mongooseice/udp/worker.ex | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | 39 | 2017-01-20T08:54:13.000Z | 2017-09-13T11:30:14.000Z | lib/mongooseice/udp/worker.ex | glassechidna/MongooseICE | c2ea99f47460fd7293b51eaa72fbce122a60affe | [
"Apache-2.0"
] | 13 | 2018-03-29T07:03:25.000Z | 2022-03-06T10:21:45.000Z | defmodule MongooseICE.UDP.Worker do
@moduledoc false
# Process handling STUN messages received over UDP
#
# Currently when worker receives a message which can't
# be decoded or doesn't know how to process a message
# it simply crashes.
alias MongooseICE.UDP
alias MongooseICE.TURN
alias MongooseICE.STUN
alias MongooseICE.UDP.{WorkerSupervisor, Dispatcher}
use GenServer
require Logger
# should be configurable
@timeout 5_000
# how many packets should we accept per one :inet.setopts(socket, {:active, N}) call?
@burst_length 500
@type state :: %{socket: UDP.socket,
nonce_updated_at: integer,
client: MongooseICE.client_info,
server: UDP.server_opts,
turn: TURN.t
}
# Starts a UDP worker
@spec start(atom, MongooseICE.client_info) :: {:ok, pid} | :error
def start(worker_sup, client) do
WorkerSupervisor.start_worker(worker_sup, client)
end
# Process UDP datagram which might be STUN message
@spec process_data(pid, binary) :: :ok
def process_data(pid, data) do
GenServer.cast(pid, {:process_data, data})
end
def start_link(dispatcher, server_opts, client) do
GenServer.start_link(__MODULE__, [dispatcher, server_opts, client])
end
## GenServer callbacks
def init([dispatcher, server_opts, client]) do
_ = Dispatcher.register_worker(dispatcher, self(), client.ip, client.port)
state = %{client: client, nonce_updated_at: 0,
server: server_opts, turn: %TURN{}}
{:ok, state, timeout(state)}
end
def handle_call(:get_permissions, _from, state) do
{:reply, state.turn.permissions, state, timeout(state)}
end
def handle_call(:get_channels, _from, state) do
{:reply, state.turn.channels, state}
end
def handle_cast({:process_data, data}, state) do
state = maybe_update_nonce(state)
next_state =
case STUN.process_message(data, state.client, state.server, state.turn) do
{:ok, {:void, new_turn_state}} ->
%{state | turn: new_turn_state}
{:ok, {resp, new_turn_state}} ->
:ok = :gen_udp.send(state.client.socket, state.client.ip,
state.client.port, resp)
%{state | turn: new_turn_state}
end
{:noreply, next_state, timeout(next_state)}
end
def handle_info({:udp, socket, ip, port, data}, state = %{turn:
%TURN{allocation: %TURN.Allocation{socket: socket}}}) do
turn_state = state.turn
next_state =
case TURN.has_permission(turn_state, ip) do
{^turn_state, false} ->
Logger.debug(~s"Dropped data from peer #{ip}:#{port} due to no permission")
__MODULE__.handle_peer_data(:no_permission, ip, port, data, state)
{new_turn_state, false} ->
Logger.debug(~s"Dropped data from peer #{ip}:#{port} due to stale permission")
next_state = %{state | turn: new_turn_state}
__MODULE__.handle_peer_data(:stale_permission, ip, port, data, next_state)
{^turn_state, true} ->
Logger.debug(~s"Processing data from peer #{ip}:#{port}")
__MODULE__.handle_peer_data(:allowed, ip, port, data, state)
end
{:noreply, next_state, timeout(next_state)}
end
def handle_info({:udp_passive, socket},
%{turn: %TURN{allocation: %TURN.Allocation{socket: socket}}} = state) do
n = burst_length()
Logger.debug(~s"Processed #{n} peer packets")
:inet.setopts(socket, [active: n])
{:noreply, state, timeout(state)}
end
def handle_info(:timeout, state) do
handle_timeout(state)
end
  # Relays data from a permitted peer back to the TURN client: as ChannelData
  # when a channel is bound to {ip, port}, otherwise as a Data indication.
  # Returns the worker state with the (possibly refreshed) TURN state.
  def handle_peer_data(:allowed, ip, port, data, state) do
    {turn, payload} =
      case TURN.has_channel(state.turn, {ip, port}) do
        {:ok, turn_state, channel} ->
          {turn_state, channel_data(channel.number, data)}
        {:error, turn_state} ->
          {turn_state, data_params(ip, port, data)}
      end
    # Send failure crashes the worker on purpose (let-it-crash).
    :ok = :gen_udp.send(state.client.socket, state.client.ip, state.client.port,
                        Jerboa.Format.encode(payload))
    %{state | turn: turn}
  end
  # This function clause is for (not) handling rejected peer's data.
  # It exists to make testing easier and to delete expired channels.
  def handle_peer_data(_, ip, port, _data, state) do
    # has_channel/2 is still consulted for its side effect of pruning an
    # expired channel from the TURN state; the data itself is dropped.
    turn_state =
      case TURN.has_channel(state.turn, {ip, port}) do
        {:ok, turn, _} -> turn
        {:error, turn} -> turn
      end
    %{state | turn: turn_state}
  end
# Extracted as a separate function,
# as it's easier to trace for side effects this way.
defp handle_timeout(state) do
{:stop, :normal, state}
end
defp maybe_update_nonce(state) do
%{nonce_updated_at: last_update, turn: turn_state} = state
expire_at = last_update + MongooseICE.Auth.nonce_lifetime()
now = MongooseICE.Time.system_time(:second)
case expire_at < now do
true ->
new_turn_state = %TURN{turn_state | nonce: MongooseICE.Auth.gen_nonce()}
%{state | turn: new_turn_state, nonce_updated_at: now}
false ->
state
end
end
defp timeout(%{turn: %TURN{allocation: nil}}), do: @timeout
defp timeout(%{turn: %TURN{allocation: allocation}}) do
%TURN.Allocation{expire_at: expire_at} = allocation
now = MongooseICE.Time.system_time(:second)
timeout_ms = (expire_at - now) * 1000
max(0, timeout_ms)
end
  # Builds a STUN Data indication carrying `data` from the peer at ip:port.
  defp data_params(ip, port, data) do
    alias Jerboa.Params, as: P
    alias Jerboa.Format.Body.Attribute.{Data, XORPeerAddress}
    P.new()
    |> P.put_class(:indication)
    |> P.put_method(:data)
    |> P.put_attr(%Data{content: data})
    |> P.put_attr(XORPeerAddress.new(ip, port))
  end
  # Wraps `data` in a TURN ChannelData message for the given channel number.
  defp channel_data(channel_number, data) do
    alias Jerboa.ChannelData
    %ChannelData{channel_number: channel_number, data: data}
  end
  # Packets accepted per {:active, N} re-arm; exposed publicly for tests.
  def burst_length, do: @burst_length
end
| 33.764368 | 90 | 0.655149 |
ff467564b5f3cce73b87ea849bbfea30b42134f8 | 1,129 | exs | Elixir | config/config.exs | arghmeleg/scrappy | f7002d00e789c1a732c7133bb5b73ad9575cb83f | [
"MIT"
] | 1 | 2019-12-24T00:56:41.000Z | 2019-12-24T00:56:41.000Z | config/config.exs | arghmeleg/scrappy | f7002d00e789c1a732c7133bb5b73ad9575cb83f | [
"MIT"
] | null | null | null | config/config.exs | arghmeleg/scrappy | f7002d00e789c1a732c7133bb5b73ad9575cb83f | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :scrappy, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:scrappy, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.419355 | 73 | 0.751107 |
ff46838d95d25a4d18ddfba9d60c3f94d02c5e5f | 733 | exs | Elixir | test/suite/draft6/min_items_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | 1 | 2019-06-21T10:55:33.000Z | 2019-06-21T10:55:33.000Z | test/suite/draft6/min_items_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | null | null | null | test/suite/draft6/min_items_test.exs | starbelly/json_xema | 73ca23f9ce51d450d2f9cf0509ee6bb82f9a8c79 | [
"MIT"
] | null | null | null | defmodule Draft6.MinItemsTest do
  use ExUnit.Case, async: true
  import JsonXema, only: [valid?: 2]
  # JSON Schema draft-6 suite: arrays must have at least `minItems` elements;
  # the keyword is ignored for non-array instances.
  describe "minItems validation" do
    setup do
      %{schema: ~s(
        {
          "minItems": 1
        }
      ) |> Jason.decode!() |> JsonXema.new()}
    end
    test "longer is valid", %{schema: schema} do
      data = [1, 2]
      assert valid?(schema, data)
    end
    test "exact length is valid", %{schema: schema} do
      data = [1]
      assert valid?(schema, data)
    end
    test "too short is invalid", %{schema: schema} do
      data = []
      refute valid?(schema, data)
    end
    # Per the spec, minItems does not apply to strings or other non-arrays.
    test "ignores non-arrays", %{schema: schema} do
      data = ""
      assert valid?(schema, data)
    end
  end
| 20.361111 | 54 | 0.556617 |
ff46a3656adf28218db3853df7b5638e9f2b83fd | 1,839 | ex | Elixir | lib/mix/tasks/protox/generate.ex | zolakeith/protox | 19ce8e9ae5d2f2505d683c066e6f723be520945d | [
"MIT"
] | null | null | null | lib/mix/tasks/protox/generate.ex | zolakeith/protox | 19ce8e9ae5d2f2505d683c066e6f723be520945d | [
"MIT"
] | null | null | null | lib/mix/tasks/protox/generate.ex | zolakeith/protox | 19ce8e9ae5d2f2505d683c066e6f723be520945d | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Protox.Generate do
@moduledoc """
Generate Elixir code from `.proto` files.
Example:
`mix protox.generate --output-path=lib/message.ex --include-path=. message.proto`
The generated file will be usable in any project as long as protox is declared
in the dependencies (the generated file still needs functions from the protox runtime).
You can use the `--namespace` option to prepend a namespace to all generated modules.
If you have large protobuf files, you can use the `--multiple-files` option to generate
one file per module.
Finally, you can pass the option `--keep-unknown-fields=false` to remove support of
unknown fields.
"""
@shortdoc "Generate Elixir code from Protobuf definitions"
use Mix.Task
@impl Mix.Task
@spec run(any) :: any
def run(args) do
with {opts, files, []} <-
OptionParser.parse(args,
strict: [
output_path: :string,
include_path: :keep,
namespace: :string,
multiple_files: :boolean,
keep_unknown_fields: :boolean
]
),
{:ok, output_path} <- Keyword.fetch(opts, :output_path) do
{include_paths, opts} = Keyword.pop(opts, :include_path)
{namespace, opts} = Keyword.pop(opts, :namespace)
{multiple_files, opts} = Keyword.pop(opts, :multiple_files, false)
files
|> Protox.generate_module_code(
Path.expand(output_path),
multiple_files,
include_paths,
namespace,
opts
)
|> Enum.each(&generate_file/1)
else
err ->
IO.puts("Failed to generate code: #{inspect(err)}")
:error
end
end
defp generate_file(%Protox.FileContent{name: file_name, content: content}) do
File.write!(file_name, content)
end
end
| 30.147541 | 89 | 0.637303 |
ff46e8f8e0f2c4253d2a8a4cb2ce7805b11760d5 | 3,209 | exs | Elixir | test/config/config.exs | rosetta-home/rosetta_rpi3 | 61ee6f947577dea1b101fe887211dc84d28fea4b | [
"Apache-2.0"
] | 6 | 2017-11-02T14:56:05.000Z | 2018-04-24T13:53:49.000Z | test/config/config.exs | rosetta-home/rosetta_rpi3 | 61ee6f947577dea1b101fe887211dc84d28fea4b | [
"Apache-2.0"
] | 1 | 2018-03-10T19:59:52.000Z | 2018-03-10T22:58:06.000Z | test/config/config.exs | rosetta-home/rosetta_rpi3 | 61ee6f947577dea1b101fe887211dc84d28fea4b | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Repository specific configuration
system = :nerves_system_rpi3
platform = "rpi3"
arch = "arm"
app_part_devparth = "/dev/mmcblk0p3"
network_interface = System.get_env("NERVES_NETWORK_INTERFACE") || "eth0"
# Environment specific configuration
# Nerves Project test farm configuration
# NERVES_TEST_SERVER = nerves-test-server.herokuapp.com
# WEBSOCKET_PROTOCOL = wss
test_server = System.get_env("NERVES_TEST_SERVER")
websocket_protocol = System.get_env("WEBSOCKET_PROTOCOL") || "ws"
# Common configuration
# Configure shoehorn boot order.
config :shoehorn,
app: :nerves_system_test,
init: [:nerves_runtime, :system_registry_term_storage, :nerves_network]
# Only trust signed firmware
config :nerves_system_test, :firmware,
public_key: System.get_env("NERVES_FW_PUB_KEY")
# Configure system_registry term storage to store the wifi credentials on the
# app data partition. If the device is using eth0 as the primary connection
# mechanism the wlan0 settings do not need to be configured.
config :system_registry, SystemRegistry.TermStorage,
path: "/root/system_registry",
scopes: [
[:config, :network_interface, "wlan0", :ssid],
[:config, :network_interface, "wlan0", :psk]
]
# Configure the default interface settings.
# wlan0 | eth0 - Used to establish a connection to the test server.
# usb0 - configured with linklocal to be validated as part of the test results.
config :nerves_network, :default,
eth0: [
ipv4_address_method: :dhcp
],
wlan0: [
ipv4_address_method: :dhcp
],
usb0: [
ipv4_address_method: :linklocal
]
# Configure the url for the connection to the test server phoenix channel socket.
config :nerves_system_test, NervesTestServer.Socket,
url: "#{websocket_protocol}://#{test_server}/socket/websocket"
# Configure the test suite. nerves_system_test needs to know information such as
# system - the name of the system repo the tests are being executed on
# network_interface - the interface that should be used for reporting the
# results on
# tests - the locations for the tests to run.
# Currently, it is only supported that tests are included in the priv_dir
# of the app and dependencies. The default layout runs tests that are common
# across devices (:nerves_system_test) and those that are specific to the
# device (this app)
config :nerves_system_test,
system: system,
network_interface:
network_interface,
tests: [
{:test, :priv_dir, "test"},
{:nerves_system_test, :priv_dir, "test"}
]
# The configuration stored here is duplicated from the project so it can be
# validated by nerves_system_test because the source is unavailable at runtime.
config :nerves_runtime, :kv,
nerves_fw_application_part0_devpath: app_part_devparth,
nerves_fw_application_part0_fstype: "ext4",
nerves_fw_application_part0_target: "/root",
nerves_fw_architecture: arch,
nerves_fw_author: "The Nerves Team",
nerves_fw_description: Mix.Project.config()[:description],
nerves_fw_platform: platform,
nerves_fw_product: Mix.Project.config()[:app],
nerves_fw_vcs_identifier: System.get_env("NERVES_FW_VCS_IDENTIFIER"),
nerves_fw_version: Mix.Project.config()[:version]
| 34.880435 | 81 | 0.762231 |
ff470083b5458a99d67fdf6b4c05c6536571480b | 18,651 | ex | Elixir | clients/content/lib/google_api/content/v2/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/content/lib/google_api/content/v2/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.Product do
  @moduledoc """
  Product data. After inserting, updating, or deleting a product, it may take several minutes before changes take effect.

  ## Attributes

  * `source` (*type:* `String.t`, *default:* `nil`) - The source of the offer, i.e., how the offer was created.
  * `targetCountry` (*type:* `String.t`, *default:* `nil`) - The CLDR territory code for the item.
  * `promotionIds` (*type:* `list(String.t)`, *default:* `nil`) - The unique ID of a promotion.
  * `energyEfficiencyClass` (*type:* `String.t`, *default:* `nil`) - The energy efficiency class as defined in EU directive 2010/30/EU.
  * `customLabel3` (*type:* `String.t`, *default:* `nil`) - Custom label 3 for custom grouping of items in a Shopping campaign.
  * `maxEnergyEfficiencyClass` (*type:* `String.t`, *default:* `nil`) - The energy efficiency class as defined in EU directive 2010/30/EU.
  * `title` (*type:* `String.t`, *default:* `nil`) - Title of the item.
  * `availabilityDate` (*type:* `String.t`, *default:* `nil`) - The day a pre-ordered product becomes available for delivery, in ISO 8601 format.
  * `availability` (*type:* `String.t`, *default:* `nil`) - Availability status of the item.
  * `contentLanguage` (*type:* `String.t`, *default:* `nil`) - The two-letter ISO 639-1 language code for the item.
  * `sellOnGoogleQuantity` (*type:* `String.t`, *default:* `nil`) - The quantity of the product that is available for selling on Google. Supported only for online products.
  * `customLabel2` (*type:* `String.t`, *default:* `nil`) - Custom label 2 for custom grouping of items in a Shopping campaign.
  * `displayAdsSimilarIds` (*type:* `list(String.t)`, *default:* `nil`) - Advertiser-specified recommendations.
  * `shippingWeight` (*type:* `GoogleApi.Content.V2.Model.ProductShippingWeight.t`, *default:* `nil`) - Weight of the item for shipping.
  * `shippingLength` (*type:* `GoogleApi.Content.V2.Model.ProductShippingDimension.t`, *default:* `nil`) - Length of the item for shipping.
  * `id` (*type:* `String.t`, *default:* `nil`) - The REST ID of the product. Content API methods that operate on products take this as their productId parameter.
    The REST ID for a product is of the form channel:contentLanguage:targetCountry:offerId.
  * `customLabel0` (*type:* `String.t`, *default:* `nil`) - Custom label 0 for custom grouping of items in a Shopping campaign.
  * `ageGroup` (*type:* `String.t`, *default:* `nil`) - Target age group of the item.
  * `imageLink` (*type:* `String.t`, *default:* `nil`) - URL of an image of the item.
  * `warnings` (*type:* `list(GoogleApi.Content.V2.Model.Error.t)`, *default:* `nil`) - Read-only warnings.
  * `minEnergyEfficiencyClass` (*type:* `String.t`, *default:* `nil`) - The energy efficiency class as defined in EU directive 2010/30/EU.
  * `price` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - Price of the item.
  * `channel` (*type:* `String.t`, *default:* `nil`) - The item's channel (online or local).
  * `minHandlingTime` (*type:* `String.t`, *default:* `nil`) - Minimal product handling time (in business days).
  * `customLabel4` (*type:* `String.t`, *default:* `nil`) - Custom label 4 for custom grouping of items in a Shopping campaign.
  * `brand` (*type:* `String.t`, *default:* `nil`) - Brand of the item.
  * `salePriceEffectiveDate` (*type:* `String.t`, *default:* `nil`) - Date range during which the item is on sale (see products data specification).
  * `expirationDate` (*type:* `String.t`, *default:* `nil`) - Date on which the item should expire, as specified upon insertion, in ISO 8601 format. The actual expiration date in Google Shopping is exposed in productstatuses as googleExpirationDate and might be earlier if expirationDate is too far in the future.
  * `condition` (*type:* `String.t`, *default:* `nil`) - Condition or state of the item.
  * `material` (*type:* `String.t`, *default:* `nil`) - The material of which the item is made.
  * `googleProductCategory` (*type:* `String.t`, *default:* `nil`) - Google's category of the item (see Google product taxonomy).
  * `isBundle` (*type:* `boolean()`, *default:* `nil`) - Whether the item is a merchant-defined bundle. A bundle is a custom grouping of different products sold by a merchant for a single price.
  * `mobileLink` (*type:* `String.t`, *default:* `nil`) - Link to a mobile-optimized version of the landing page.
  * `sizeType` (*type:* `String.t`, *default:* `nil`) - The cut of the item. Recommended for apparel items.
  * `additionalImageLinks` (*type:* `list(String.t)`, *default:* `nil`) - Additional URLs of images of the item.
  * `shipping` (*type:* `list(GoogleApi.Content.V2.Model.ProductShipping.t)`, *default:* `nil`) - Shipping rules.
  * `validatedDestinations` (*type:* `list(String.t)`, *default:* `nil`) - Deprecated. The read-only list of intended destinations which passed validation.
  * `destinations` (*type:* `list(GoogleApi.Content.V2.Model.ProductDestination.t)`, *default:* `nil`) - Specifies the intended destinations for the product.
  * `costOfGoodsSold` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - Cost of goods sold. Used for gross profit reporting.
  * `additionalProductTypes` (*type:* `list(String.t)`, *default:* `nil`) - Additional categories of the item (formatted as in products data specification).
  * `installment` (*type:* `GoogleApi.Content.V2.Model.Installment.t`, *default:* `nil`) - Number and amount of installments to pay for an item. Brazil only.
  * `onlineOnly` (*type:* `boolean()`, *default:* `nil`) - Deprecated. Whether an item is available for purchase only online.
  * `multipack` (*type:* `String.t`, *default:* `nil`) - The number of identical products in a merchant-defined multipack.
  * `salePrice` (*type:* `GoogleApi.Content.V2.Model.Price.t`, *default:* `nil`) - Advertised sale price of the item.
  * `sizeSystem` (*type:* `String.t`, *default:* `nil`) - System in which the size is specified. Recommended for apparel items.
  * `shippingLabel` (*type:* `String.t`, *default:* `nil`) - The shipping label of the product, used to group product in account-level shipping rules.
  * `displayAdsValue` (*type:* `float()`, *default:* `nil`) - Offer margin for dynamic remarketing campaigns.
  * `kind` (*type:* `String.t`, *default:* `content#product`) - Identifies what kind of resource this is. Value: the fixed string "content#product".
  * `loyaltyPoints` (*type:* `GoogleApi.Content.V2.Model.LoyaltyPoints.t`, *default:* `nil`) - Loyalty points that users receive after purchasing the item. Japan only.
  * `displayAdsLink` (*type:* `String.t`, *default:* `nil`) - URL directly to your item's landing page for dynamic remarketing campaigns.
  * `displayAdsId` (*type:* `String.t`, *default:* `nil`) - An identifier for an item for dynamic remarketing campaigns.
  * `customGroups` (*type:* `list(GoogleApi.Content.V2.Model.CustomGroup.t)`, *default:* `nil`) - A list of custom (merchant-provided) custom attribute groups.
  * `gtin` (*type:* `String.t`, *default:* `nil`) - Global Trade Item Number (GTIN) of the item.
  * `description` (*type:* `String.t`, *default:* `nil`) - Description of the item.
  * `shippingWidth` (*type:* `GoogleApi.Content.V2.Model.ProductShippingDimension.t`, *default:* `nil`) - Width of the item for shipping.
  * `displayAdsTitle` (*type:* `String.t`, *default:* `nil`) - Title of an item for dynamic remarketing campaigns.
  * `unitPricingMeasure` (*type:* `GoogleApi.Content.V2.Model.ProductUnitPricingMeasure.t`, *default:* `nil`) - The measure and dimension of an item.
  * `gender` (*type:* `String.t`, *default:* `nil`) - Target gender of the item.
  * `customAttributes` (*type:* `list(GoogleApi.Content.V2.Model.CustomAttribute.t)`, *default:* `nil`) - A list of custom (merchant-provided) attributes. It can also be used for submitting any attribute of the feed specification in its generic form (e.g., { "name": "size type", "value": "regular" }). This is useful for submitting attributes not explicitly exposed by the API.
  * `taxes` (*type:* `list(GoogleApi.Content.V2.Model.ProductTax.t)`, *default:* `nil`) - Tax information.
  * `adwordsRedirect` (*type:* `String.t`, *default:* `nil`) - Allows advertisers to override the item URL when the product is shown within the context of Product Ads.
  * `shippingHeight` (*type:* `GoogleApi.Content.V2.Model.ProductShippingDimension.t`, *default:* `nil`) - Height of the item for shipping.
  * `mpn` (*type:* `String.t`, *default:* `nil`) - Manufacturer Part Number (MPN) of the item.
  * `pattern` (*type:* `String.t`, *default:* `nil`) - The item's pattern (e.g. polka dots).
  * `customLabel1` (*type:* `String.t`, *default:* `nil`) - Custom label 1 for custom grouping of items in a Shopping campaign.
  * `adwordsGrouping` (*type:* `String.t`, *default:* `nil`) - Used to group items in an arbitrary way. Only for CPA%, discouraged otherwise.
  * `aspects` (*type:* `list(GoogleApi.Content.V2.Model.ProductAspect.t)`, *default:* `nil`) - Deprecated. Do not use.
  * `offerId` (*type:* `String.t`, *default:* `nil`) - A unique identifier for the item. Leading and trailing whitespaces are stripped and multiple whitespaces are replaced by a single whitespace upon submission. Only valid unicode characters are accepted. See the products feed specification for details.
    Note: Content API methods that operate on products take the REST ID of the product, not this identifier.
  * `maxHandlingTime` (*type:* `String.t`, *default:* `nil`) - Maximal product handling time (in business days).
  * `itemGroupId` (*type:* `String.t`, *default:* `nil`) - Shared identifier for all variants of the same product.
  * `color` (*type:* `String.t`, *default:* `nil`) - Color of the item.
  * `link` (*type:* `String.t`, *default:* `nil`) - URL directly linking to your item's page on your website.
  * `adwordsLabels` (*type:* `list(String.t)`, *default:* `nil`) - Similar to adwords_grouping, but only works on CPC.
  * `productType` (*type:* `String.t`, *default:* `nil`) - Your category of the item (formatted as in products data specification).
  * `unitPricingBaseMeasure` (*type:* `GoogleApi.Content.V2.Model.ProductUnitPricingBaseMeasure.t`, *default:* `nil`) - The preference of the denominator of the unit price.
  * `adult` (*type:* `boolean()`, *default:* `nil`) - Set to true if the item is targeted towards adults.
  * `identifierExists` (*type:* `boolean()`, *default:* `nil`) - False when the item does not have unique product identifiers appropriate to its category, such as GTIN, MPN, and brand. Required according to the Unique Product Identifier Rules for all target countries except for Canada.
  * `sizes` (*type:* `list(String.t)`, *default:* `nil`) - Size of the item.
  """

  # NOTE: this module is auto-generated (see the file header) — do not edit by
  # hand. ModelBase provides the `field/2` macro and decode/encode helpers.
  use GoogleApi.Gax.ModelBase

  # Struct typespec mirrors the `field` registrations below, one entry per
  # attribute documented in the @moduledoc.
  @type t :: %__MODULE__{
          :source => String.t(),
          :targetCountry => String.t(),
          :promotionIds => list(String.t()),
          :energyEfficiencyClass => String.t(),
          :customLabel3 => String.t(),
          :maxEnergyEfficiencyClass => String.t(),
          :title => String.t(),
          :availabilityDate => String.t(),
          :availability => String.t(),
          :contentLanguage => String.t(),
          :sellOnGoogleQuantity => String.t(),
          :customLabel2 => String.t(),
          :displayAdsSimilarIds => list(String.t()),
          :shippingWeight => GoogleApi.Content.V2.Model.ProductShippingWeight.t(),
          :shippingLength => GoogleApi.Content.V2.Model.ProductShippingDimension.t(),
          :id => String.t(),
          :customLabel0 => String.t(),
          :ageGroup => String.t(),
          :imageLink => String.t(),
          :warnings => list(GoogleApi.Content.V2.Model.Error.t()),
          :minEnergyEfficiencyClass => String.t(),
          :price => GoogleApi.Content.V2.Model.Price.t(),
          :channel => String.t(),
          :minHandlingTime => String.t(),
          :customLabel4 => String.t(),
          :brand => String.t(),
          :salePriceEffectiveDate => String.t(),
          :expirationDate => String.t(),
          :condition => String.t(),
          :material => String.t(),
          :googleProductCategory => String.t(),
          :isBundle => boolean(),
          :mobileLink => String.t(),
          :sizeType => String.t(),
          :additionalImageLinks => list(String.t()),
          :shipping => list(GoogleApi.Content.V2.Model.ProductShipping.t()),
          :validatedDestinations => list(String.t()),
          :destinations => list(GoogleApi.Content.V2.Model.ProductDestination.t()),
          :costOfGoodsSold => GoogleApi.Content.V2.Model.Price.t(),
          :additionalProductTypes => list(String.t()),
          :installment => GoogleApi.Content.V2.Model.Installment.t(),
          :onlineOnly => boolean(),
          :multipack => String.t(),
          :salePrice => GoogleApi.Content.V2.Model.Price.t(),
          :sizeSystem => String.t(),
          :shippingLabel => String.t(),
          :displayAdsValue => float(),
          :kind => String.t(),
          :loyaltyPoints => GoogleApi.Content.V2.Model.LoyaltyPoints.t(),
          :displayAdsLink => String.t(),
          :displayAdsId => String.t(),
          :customGroups => list(GoogleApi.Content.V2.Model.CustomGroup.t()),
          :gtin => String.t(),
          :description => String.t(),
          :shippingWidth => GoogleApi.Content.V2.Model.ProductShippingDimension.t(),
          :displayAdsTitle => String.t(),
          :unitPricingMeasure => GoogleApi.Content.V2.Model.ProductUnitPricingMeasure.t(),
          :gender => String.t(),
          :customAttributes => list(GoogleApi.Content.V2.Model.CustomAttribute.t()),
          :taxes => list(GoogleApi.Content.V2.Model.ProductTax.t()),
          :adwordsRedirect => String.t(),
          :shippingHeight => GoogleApi.Content.V2.Model.ProductShippingDimension.t(),
          :mpn => String.t(),
          :pattern => String.t(),
          :customLabel1 => String.t(),
          :adwordsGrouping => String.t(),
          :aspects => list(GoogleApi.Content.V2.Model.ProductAspect.t()),
          :offerId => String.t(),
          :maxHandlingTime => String.t(),
          :itemGroupId => String.t(),
          :color => String.t(),
          :link => String.t(),
          :adwordsLabels => list(String.t()),
          :productType => String.t(),
          :unitPricingBaseMeasure => GoogleApi.Content.V2.Model.ProductUnitPricingBaseMeasure.t(),
          :adult => boolean(),
          :identifierExists => boolean(),
          :sizes => list(String.t())
        }

  # Field registrations for (de)serialization. `as:` names the nested model
  # module for that field and `type: :list` marks repeated fields, matching
  # the list(...) entries in the typespec above.
  field(:source)
  field(:targetCountry)
  field(:promotionIds, type: :list)
  field(:energyEfficiencyClass)
  field(:customLabel3)
  field(:maxEnergyEfficiencyClass)
  field(:title)
  field(:availabilityDate)
  field(:availability)
  field(:contentLanguage)
  field(:sellOnGoogleQuantity)
  field(:customLabel2)
  field(:displayAdsSimilarIds, type: :list)
  field(:shippingWeight, as: GoogleApi.Content.V2.Model.ProductShippingWeight)
  field(:shippingLength, as: GoogleApi.Content.V2.Model.ProductShippingDimension)
  field(:id)
  field(:customLabel0)
  field(:ageGroup)
  field(:imageLink)
  field(:warnings, as: GoogleApi.Content.V2.Model.Error, type: :list)
  field(:minEnergyEfficiencyClass)
  field(:price, as: GoogleApi.Content.V2.Model.Price)
  field(:channel)
  field(:minHandlingTime)
  field(:customLabel4)
  field(:brand)
  field(:salePriceEffectiveDate)
  field(:expirationDate)
  field(:condition)
  field(:material)
  field(:googleProductCategory)
  field(:isBundle)
  field(:mobileLink)
  field(:sizeType)
  field(:additionalImageLinks, type: :list)
  field(:shipping, as: GoogleApi.Content.V2.Model.ProductShipping, type: :list)
  field(:validatedDestinations, type: :list)
  field(:destinations, as: GoogleApi.Content.V2.Model.ProductDestination, type: :list)
  field(:costOfGoodsSold, as: GoogleApi.Content.V2.Model.Price)
  field(:additionalProductTypes, type: :list)
  field(:installment, as: GoogleApi.Content.V2.Model.Installment)
  field(:onlineOnly)
  field(:multipack)
  field(:salePrice, as: GoogleApi.Content.V2.Model.Price)
  field(:sizeSystem)
  field(:shippingLabel)
  field(:displayAdsValue)
  field(:kind)
  field(:loyaltyPoints, as: GoogleApi.Content.V2.Model.LoyaltyPoints)
  field(:displayAdsLink)
  field(:displayAdsId)
  field(:customGroups, as: GoogleApi.Content.V2.Model.CustomGroup, type: :list)
  field(:gtin)
  field(:description)
  field(:shippingWidth, as: GoogleApi.Content.V2.Model.ProductShippingDimension)
  field(:displayAdsTitle)
  field(:unitPricingMeasure, as: GoogleApi.Content.V2.Model.ProductUnitPricingMeasure)
  field(:gender)
  field(:customAttributes, as: GoogleApi.Content.V2.Model.CustomAttribute, type: :list)
  field(:taxes, as: GoogleApi.Content.V2.Model.ProductTax, type: :list)
  field(:adwordsRedirect)
  field(:shippingHeight, as: GoogleApi.Content.V2.Model.ProductShippingDimension)
  field(:mpn)
  field(:pattern)
  field(:customLabel1)
  field(:adwordsGrouping)
  field(:aspects, as: GoogleApi.Content.V2.Model.ProductAspect, type: :list)
  field(:offerId)
  field(:maxHandlingTime)
  field(:itemGroupId)
  field(:color)
  field(:link)
  field(:adwordsLabels, type: :list)
  field(:productType)
  field(:unitPricingBaseMeasure, as: GoogleApi.Content.V2.Model.ProductUnitPricingBaseMeasure)
  field(:adult)
  field(:identifierExists)
  field(:sizes, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.Product do
  # Delegate to the model's own decode/2 (provided by GoogleApi.Gax.ModelBase).
  def decode(value, options),
    do: GoogleApi.Content.V2.Model.Product.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.Product do
  # All generated models share the generic encoder from GoogleApi.Gax.ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 66.610714 | 380 | 0.675138 |
ff470a5025a02c5a5e3494394ce2bb93ce74809c | 5,984 | ex | Elixir | lib/canvas_api/services/comment_service.ex | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 123 | 2017-04-04T18:15:48.000Z | 2021-04-26T08:04:22.000Z | lib/canvas_api/services/comment_service.ex | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | null | null | null | lib/canvas_api/services/comment_service.ex | usecanvas/api-v2 | 59214db3a2cf12eb939f22fed320fd10cb47cdfe | [
"Apache-2.0"
] | 17 | 2017-04-04T18:58:29.000Z | 2021-05-10T21:39:16.000Z | defmodule CanvasAPI.CommentService do
@moduledoc """
A service for viewing and manipulating comments.
"""
alias CanvasAPI.{Account, Canvas, CanvasService, Comment,
SlackNotifier, Team, User, UserService}
alias Ecto.Changeset
use CanvasAPI.Web, :service
@preload [:creator, canvas: [:team]]
@doc """
Create a new comment on a given block and block.
"""
@spec create(map, Keyword.t) :: {:ok, Comment.t} | {:error, Changeset.t}
def create(attrs, opts) do
%Comment{}
|> Comment.changeset(attrs)
|> put_canvas(attrs["canvas_id"], opts[:account])
|> put_block(attrs["block_id"])
|> put_creator(opts[:account])
|> Repo.insert
|> case do
{:ok, comment} ->
notify_comment(comment, "new_comment")
{:ok, comment}
error ->
error
end
end
@spec put_block(Changeset.t, String.t | nil) :: Changeset.t
defp put_block(changeset, id) when is_binary(id) do
with canvas when not is_nil(canvas) <- get_field(changeset, :canvas),
block when not is_nil(block) <- Canvas.find_block(canvas, id) do
put_change(changeset, :block_id, id)
else
_ -> add_error(changeset, :block, "was not found")
end
end
defp put_block(changeset, _),
do: add_error(changeset, :block, "is required")
@spec put_canvas(Changeset.t, String.t | nil, Account.t) :: Changeset.t
defp put_canvas(changeset, id, account) when is_binary(id) do
id
|> CanvasService.get(account: account)
|> case do
{:ok, canvas} ->
changeset |> put_assoc(:canvas, canvas)
{:error, _} ->
changeset |> add_error(:canvas, "was not found")
end
end
defp put_canvas(changeset, _, _),
do: changeset |> add_error(:canvas, "is required")
@spec put_creator(Changeset.t, Account.t) :: Changeset.t
defp put_creator(changeset, account) do
with canvas when not is_nil(canvas) <- get_field(changeset, :canvas) do
{:ok, user} = UserService.find_by_team(account, team_id: canvas.team_id)
put_assoc(changeset, :creator, user)
else
_ -> changeset
end
end
@doc """
Retrieve a single comment by ID.
"""
@spec get(String.t, Keyword.t) :: {:ok, Comment.t}
| {:error, :comment_not_found}
def get(id, opts \\ []) do
opts[:account]
|> comment_query
|> maybe_lock
|> Repo.get(id)
|> case do
comment = %Comment{} ->
{:ok, comment}
nil ->
{:error, :comment_not_found}
end
end
@doc """
List comments.
"""
@spec list(Keyword.t) :: [Comment.t]
def list(opts) do
opts[:account]
|> comment_query
|> filter(opts[:filter])
|> Repo.all
end
@spec filter(Ecto.Query.t, map | nil) :: Ecto.Query.t
defp filter(query, filter) when is_map(filter) do
filter
|> Enum.reduce(query, &do_filter/2)
end
defp filter(query, _), do: query
@spec do_filter({String.t, String.t}, Ecto.Query.t) :: Ecto.Query.t
defp do_filter({"canvas.id", canvas_id}, query),
do: where(query, canvas_id: ^canvas_id)
defp do_filter({"block.id", block_id}, query),
do: where(query, block_id: ^block_id)
defp do_filter(_, query), do: query
@doc """
Update a comment.
"""
@spec update(String.t | Comment.t, map, Keyword.t)
:: {:ok, Comment.t}
| {:error, Changeset.t | :comment_not_found | :does_not_own}
def update(id, attrs, opts \\ [])
def update(id, attrs, opts) when is_binary(id) do
Repo.transaction fn ->
with {:ok, comment} <- get(id, opts) do
__MODULE__.update(comment, attrs, opts)
end
|> case do
{:ok, comment} -> comment
{:error, error} -> Repo.rollback(error)
end
end
end
def update(comment, attrs, opts) do
if opts[:account].id == comment.creator.account_id do
comment
|> Comment.changeset(attrs)
|> Repo.update
|> case do
{:ok, comment} ->
notify_comment(comment, "updated_comment")
{:ok, comment}
error -> error
end
else
{:error, :does_not_own}
end
end
@doc """
Delete a comment.
"""
@spec delete(String.t | Comment.t, Keyword.t) :: {:ok, Comment.t}
| {:error, :comment_not_found}
def delete(id, opts \\ [])
def delete(id, opts) when is_binary(id) do
Repo.transaction fn ->
with {:ok, comment} <- get(id, opts) do
__MODULE__.delete(comment, opts)
end
|> case do
{:ok, comment} -> comment
{:error, error} -> Repo.rollback(error)
end
end
end
def delete(comment, _opts) do
comment
|> Repo.delete
|> case do
{:ok, comment} ->
notify_comment(comment, "deleted_comment")
{:ok, comment}
error -> error
end
end
@spec comment_query(Account.t | nil) :: Ecto.Query.t
defp comment_query(nil), do: Comment |> preload(^@preload)
defp comment_query(account) do
Comment
|> join(:left, [co], ca in Canvas, co.canvas_id == ca.id)
|> join(:left, [..., ca], t in Team, ca.team_id == t.id)
|> join(:left, [..., t], u in User, u.team_id == t.id)
|> where([..., u], u.account_id == ^account.id)
|> preload(^@preload)
end
@spec notify_comment(Comment.t, String.t) :: any
defp notify_comment(comment, event) do
if event == "new_comment", do: notify_slack(comment)
broadcast("canvas:#{comment.canvas_id}",
event,
"show.json",
comment: comment)
end
@spec notify_slack(Comment.t) :: any
defp notify_slack(comment) do
with {:ok, token} <- Team.get_token(comment.canvas.team, "slack"),
token = get_in(token.meta, ~w(bot bot_access_token)) do
comment.canvas.slack_channel_ids
|> Enum.each(
&SlackNotifier.delay(
{:notify_new_comment, [token, comment.id, &1]}))
SlackNotifier.delay({:dm_new_comment, [token, comment.id]})
end
end
end
| 27.832558 | 79 | 0.597092 |
ff4723a5c1f3c96a195bf59a148bf1a5bc84be57 | 9,367 | exs | Elixir | test/tanks_game/integration_test.exs | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | test/tanks_game/integration_test.exs | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | test/tanks_game/integration_test.exs | marcinbiegun/elixir-tanks | 29a3beef303825a137249c8ae0a3ff21c33d9a1c | [
"MIT"
] | null | null | null | defmodule Tanks.Game.IntegrationTest do
use ExUnit.Case
@game_id 123
@level nil
setup do
on_exit(fn ->
Tanks.GameServer.delete(@game_id)
end)
Tanks.GameServer.create(@game_id, @level, no_tick: true)
%{game_id: @game_id}
end
describe "entity registry" do
test "getting entity by ID" do
player =
Tanks.Game.Entity.Player.new()
|> Tanks.GameECS.add_entity(@game_id)
fetched_player = ECS.Registry.Entity.get(@game_id, Tanks.Game.Entity.Player, player.id)
assert fetched_player.id == player.id
assert fetched_player.components == player.components
end
end
describe "entities" do
test "creating and reloading entities" do
player =
Tanks.Game.Entity.Player.new()
|> Tanks.GameECS.add_entity(@game_id)
assert player.__struct__ == Tanks.Game.Entity.Player
assert player.components.position.__struct__ == Tanks.Game.Components.Position
player = ECS.Entity.reload(player)
assert player.__struct__ == Tanks.Game.Entity.Player
assert player.components.position.__struct__ == Tanks.Game.Components.Position
end
test "getting list of all entites of a given type" do
projectile1 =
Tanks.Game.Entity.Projectile.new(0, 0, 1, 0)
|> Tanks.GameECS.add_entity(@game_id)
projectile2 =
Tanks.Game.Entity.Projectile.new(10, 10, 0, 1)
|> Tanks.GameECS.add_entity(@game_id)
[fetched_projectile1, fetched_projectile2] =
ECS.Registry.Entity.all(@game_id, Tanks.Game.Entity.Projectile)
|> Enum.sort_by(& &1.id)
assert projectile1 == fetched_projectile1
assert projectile2 == fetched_projectile2
end
end
describe "velocity system" do
test "projectile movement" do
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 1, 2)
|> Tanks.GameECS.add_entity(@game_id)
assert projectile.components.position.state.x == 0
assert projectile.components.position.state.y == 0
Tanks.Game.System.Velocity.process(@game_id)
projectile = ECS.Entity.reload(projectile)
assert projectile.components.position.state.x == 1
assert projectile.components.position.state.y == 2
Tanks.Game.System.Velocity.process(@game_id)
projectile = ECS.Entity.reload(projectile)
assert projectile.components.position.state.x == 2
assert projectile.components.position.state.y == 4
end
end
describe "control system" do
test "player control" do
player =
Tanks.Game.Entity.Player.new()
|> Tanks.GameECS.add_entity(@game_id)
assert player.components.control.state.right == false
control = player.components.control
control.pid
|> ECS.Component.update(Map.put(control.state, :right, true))
player = ECS.Entity.reload(player)
assert player.components.control.state.right == true
assert player.components.position.state.x == 0
Tanks.Game.System.Movement.process(@game_id)
player = ECS.Entity.reload(player)
assert player.components.position.state.x == 5
end
end
describe "lifetime system" do
test "test dying" do
lifetime = 50
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 0, 0, lifetime)
|> Tanks.GameECS.add_entity(@game_id)
projectile_id = projectile.id
Tanks.Game.System.LifetimeDying.process(@game_id)
Tanks.Game.Server.Impl.build_client_state(@game_id)
assert [] = ECS.Queue.get(@game_id, :internal)
Process.sleep(lifetime + 1)
Tanks.Game.System.LifetimeDying.process(@game_id)
assert [
%Tanks.Game.Event.Destroy{
entity_id: ^projectile_id,
entity_module: Tanks.Game.Entity.Projectile
}
] = ECS.Queue.get(@game_id, :internal)
end
end
describe "collision system" do
test "projectile vs wall collision" do
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
projectile_id = projectile.id
_wall =
Tanks.Game.Entity.Wall.new(0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.System.Collision.process(@game_id)
assert [
%Tanks.Game.Event.Destroy{
entity_id: ^projectile_id,
entity_module: Tanks.Game.Entity.Projectile
},
_
] = ECS.Queue.get(@game_id, :internal)
end
test "projectile vs zombie projectile collision" do
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
projectile_id = projectile.id
zombie =
Tanks.Game.Entity.Zombie.new(0, 0)
|> Tanks.GameECS.add_entity(@game_id)
zombie_id = zombie.id
Tanks.Game.System.Collision.process(@game_id)
assert [
%Tanks.Game.Event.Destroy{
entity_id: ^projectile_id,
entity_module: Tanks.Game.Entity.Projectile
},
%Tanks.Game.Event.Destroy{
entity_id: ^zombie_id,
entity_module: Tanks.Game.Entity.Zombie
}
] = ECS.Queue.get(@game_id, :internal) |> Enum.sort_by(& &1.entity_module)
end
test "projectile vs board tile collision" do
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
projectile_id = projectile.id
tile_size = 32
tiles = [
[:empty, :empty, :empty],
[:empty, :wall, :empty],
[:empty, :empty, :empty]
]
_board =
Tanks.Game.Entity.Board.new(tiles)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.System.Collision.process(@game_id)
assert [] == ECS.Queue.get(@game_id, :internal)
new_position = %{x: tile_size, y: tile_size}
ECS.Component.update(projectile.components.position.pid, new_position)
Tanks.Game.System.Collision.process(@game_id)
assert [
%Tanks.Game.Event.Destroy{
entity_id: ^projectile_id,
entity_module: Tanks.Game.Entity.Projectile
}
] = ECS.Queue.get(@game_id, :internal)
end
end
describe "health points" do
test "losing hp and dying" do
# Spawn wall
wall =
Tanks.Game.Entity.Wall.new(10, 10)
|> Tanks.GameECS.add_entity(@game_id)
assert wall.components.health.state.current == 5
# Projectile hits wall
projectile =
Tanks.Game.Entity.Projectile.new(10, 10, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.Server.Impl.tick(@game_id, 1)
refute ECS.Registry.Entity.get(@game_id, projectile.__struct__, projectile.id)
wall = ECS.Entity.reload(wall)
assert wall.components.health.state.current == 4
# Projectile hits wall
Tanks.Game.Entity.Projectile.new(10, 10, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.Server.Impl.tick(@game_id, 2)
wall = ECS.Entity.reload(wall)
assert wall.components.health.state.current == 3
# Projectile hits wall
Tanks.Game.Entity.Projectile.new(10, 10, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.Server.Impl.tick(@game_id, 4)
wall = ECS.Entity.reload(wall)
assert wall.components.health.state.current == 2
# Projectile hits wall
Tanks.Game.Entity.Projectile.new(10, 10, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.Server.Impl.tick(@game_id, 5)
wall = ECS.Entity.reload(wall)
assert wall.components.health.state.current == 1
# Projectile hits wall
Tanks.Game.Entity.Projectile.new(10, 10, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
Tanks.Game.Server.Impl.tick(@game_id, 6)
# Wall destroyed
refute ECS.Registry.Entity.get(@game_id, wall.__struct__, wall.id)
end
end
describe "position cache" do
test "detecting collisions" do
assert [] == Tanks.Game.Cache.Position.colliding_entities(@game_id, 0, 0, {:circle, 10})
projectile =
Tanks.Game.Entity.Projectile.new(0, 0, 0, 0)
|> Tanks.GameECS.add_entity(@game_id)
projectile_shape = projectile.components.size.state.shape
{:circle, projectile_diameter} = projectile_shape
assert [] == Tanks.Game.Cache.Position.colliding_entities(@game_id, 0, 0, {:circle, 10})
ECS.Registry.Entity.all(@game_id)
Tanks.Game.Cache.Position.update(@game_id)
assert [{Tanks.Game.Entity.Projectile, projectile.id}] ==
Tanks.Game.Cache.Position.colliding_entities(@game_id, 0, 0, {:circle, 1})
assert [] ==
Tanks.Game.Cache.Position.colliding_entities(
@game_id,
projectile_diameter / 2 + 2,
projectile_diameter / 2 + 2,
{:circle, 1}
)
assert [{Tanks.Game.Entity.Projectile, projectile.id}] ==
Tanks.Game.Cache.Position.colliding_entities(
@game_id,
projectile_diameter / 2 + 2,
projectile_diameter / 2 + 2,
{:circle, 10}
)
end
end
end
| 29.090062 | 94 | 0.623145 |
ff47556247e228bce3b00380ff9a77faa3d038ac | 9,172 | ex | Elixir | lib/membrane_h264_ffmpeg/parser.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 7 | 2021-01-30T07:12:03.000Z | 2021-12-12T05:28:29.000Z | lib/membrane_h264_ffmpeg/parser.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 9 | 2020-11-20T12:54:15.000Z | 2022-03-24T10:26:10.000Z | lib/membrane_h264_ffmpeg/parser.ex | geometerio/membrane_h264_ffmpeg_plugin | 87a348ea595c74684d1a6724b98718ba416c7b3e | [
"Apache-2.0"
] | 1 | 2021-06-21T23:33:50.000Z | 2021-06-21T23:33:50.000Z | defmodule Membrane.H264.FFmpeg.Parser do
@moduledoc """
Membrane element providing parser for H264 encoded video stream.
Uses the parser provided by FFmpeg.
By default, this parser splits the stream into h264 access units,
each of which is a sequence of NAL units corresponding to one
video frame, and equips them with the following metadata entries
under `:h264` key:
- `key_frame?: boolean` - determines whether the frame is a h264
I frame.
Setting custom packetization options affects metadata, see `alignment`
and `attach_nalus?` options for details.
"""
use Membrane.Filter
alias __MODULE__.{NALu, Native}
alias Membrane.Buffer
alias Membrane.Caps.Video.H264
require Membrane.Logger
def_input_pad :input,
demand_unit: :buffers,
caps: :any
def_output_pad :output,
caps: {H264, stream_format: :byte_stream}
def_options framerate: [
type: :framerate,
spec: H264.framerate_t(),
default: {0, 1},
description: """
Framerate of video stream, see `t:Membrane.Caps.Video.H264.framerate_t/0`
"""
],
sps: [
type: :binary,
default: <<>>,
description: """
Sequence Parameter Set NAL unit - if absent in the stream, should
be provided via this option.
"""
],
pps: [
type: :binary,
default: <<>>,
description: """
Picture Parameter Set NAL unit - if absent in the stream, should
be provided via this option.
"""
],
alignment: [
type: :atom,
spec: :au | :nal,
default: :au,
description: """
Stream units carried by each output buffer. See `t:Membrane.Caps.Video.H264.alignment_t`.
If alignment is `:nal`, the following metadata entries are added:
- `type` - h264 nalu type
- `new_access_unit: access_unit_metadata` - added whenever the new access unit starts.
`access_unit_metadata` is the metadata that would be merged into the buffer metadata
normally (if `alignment` was `:au`).
- `end_access_unit: true` - added for each NALu that ends an access unit.
"""
],
attach_nalus?: [
type: :boolean,
default: false,
description: """
Determines whether to attach NAL units list to the metadata when `alignment` option
is set to `:au`. For details see `t:Membrane.Caps.Video.H264.nalu_in_metadata_t/0`.
"""
],
skip_until_keyframe?: [
type: :boolean,
default: false,
description: """
Determines whether to drop the stream until the first key frame is received.
"""
]
@impl true
def handle_init(opts) do
state = %{
parser_ref: nil,
partial_frame: <<>>,
first_frame_prefix: opts.sps <> opts.pps,
framerate: opts.framerate,
alignment: opts.alignment,
attach_nalus?: opts.attach_nalus?,
skip_until_keyframe?: opts.skip_until_keyframe?,
metadata: nil,
timestamp: 0
}
{:ok, state}
end
@impl true
def handle_stopped_to_prepared(_ctx, state) do
with {:ok, parser_ref} <- Native.create() do
{:ok, %{state | parser_ref: parser_ref}}
else
{:error, reason} -> {{:error, reason}, state}
end
end
@impl true
def handle_prepared_to_playing(_ctx, %{skip_until_keyframe: true} = state) do
{{:ok, event: {:input, %Membrane.KeyframeRequestEvent{}}}, state}
end
@impl true
def handle_prepared_to_playing(_ctx, state) do
{:ok, state}
end
@impl true
def handle_demand(:output, _size, :buffers, _ctx, state) do
{{:ok, demand: :input}, state}
end
@impl true
def handle_process(:input, %Buffer{payload: payload, metadata: metadata}, ctx, state) do
payload =
if ctx.pads.output.start_of_stream? do
payload
else
state.first_frame_prefix <> payload
end
with {:ok, sizes, resolution_changes} <- Native.parse(payload, state.parser_ref) do
{bufs, state} = parse_access_units(payload, sizes, metadata, state)
actions = parse_resolution_changes(state, bufs, resolution_changes)
{{:ok, actions ++ [redemand: :output]}, state}
else
{:error, reason} -> {{:error, reason}, state}
end
end
# analize resolution changes and generate appropriate caps before corresponding buffers
defp parse_resolution_changes(state, bufs, resolution_changes, acc \\ [], index_offset \\ 0)
defp parse_resolution_changes(_state, bufs, [], acc, _index_offset) do
acc ++ [buffer: {:output, bufs}]
end
defp parse_resolution_changes(state, bufs, [meta | resolution_changes], acc, index_offset) do
updated_index = meta.index - index_offset
{old_bufs, next_bufs} = Enum.split(bufs, updated_index)
next_caps = mk_caps(state, meta.width, meta.height)
parse_resolution_changes(
state,
next_bufs,
resolution_changes,
acc ++ [buffer: {:output, old_bufs}, caps: {:output, next_caps}],
meta.index
)
end
@impl true
def handle_caps(:input, _caps, _ctx, state) do
# ignoring caps, new ones will be generated in handle_process
{:ok, state}
end
@impl true
def handle_end_of_stream(:input, _ctx, state) do
with {:ok, sizes} <- Native.flush(state.parser_ref) do
{bufs, state} = parse_access_units(<<>>, sizes, state.metadata, state)
if state.partial_frame != <<>> do
Membrane.Logger.warn("Discarding incomplete frame because of end of stream")
end
actions = [buffer: {:output, bufs}, end_of_stream: :output]
{{:ok, actions}, state}
end
end
@impl true
def handle_prepared_to_stopped(_ctx, state) do
{:ok, %{state | parser_ref: nil}}
end
defp parse_access_units(input, au_sizes, metadata, %{partial_frame: <<>>} = state) do
state = update_metadata(metadata, state)
{buffers, input, state} = do_parse_access_units(input, au_sizes, metadata, state, [])
{buffers, %{state | partial_frame: input}}
end
defp parse_access_units(input, [], _metadata, state) do
{[], %{state | partial_frame: state.partial_frame <> input}}
end
defp parse_access_units(input, [au_size | au_sizes], metadata, state) do
{first_au_buffers, input, state} =
do_parse_access_units(state.partial_frame <> input, [au_size], state.metadata, state, [])
state = update_metadata(metadata, state)
{buffers, input, state} = do_parse_access_units(input, au_sizes, metadata, state, [])
{first_au_buffers ++ buffers, %{state | partial_frame: input}}
end
defp do_parse_access_units(input, [], _metadata, state, acc) do
{Enum.reverse(acc), input, state}
end
defp do_parse_access_units(input, [au_size | au_sizes], metadata, state, acc) do
<<au::binary-size(au_size), rest::binary>> = input
# setting both :timestamp and :dts in order to maintain backward compatibility
metadata = Map.put(metadata, :timestamp, state.timestamp) |> Map.put(:dts, state.timestamp)
{nalus, au_metadata} = NALu.parse(au)
au_metadata = Map.merge(metadata, au_metadata)
state = Map.update!(state, :skip_until_keyframe?, &(&1 and not au_metadata.h264.key_frame?))
buffers =
case state do
%{skip_until_keyframe?: true} ->
[]
%{alignment: :au, attach_nalus?: true} ->
[%Buffer{payload: au, metadata: put_in(au_metadata, [:h264, :nalus], nalus)}]
%{alignment: :au, attach_nalus?: false} ->
[%Buffer{payload: au, metadata: au_metadata}]
%{alignment: :nal} ->
Enum.map(nalus, fn nalu ->
%Buffer{
payload: :binary.part(au, nalu.prefixed_poslen),
metadata: Map.merge(metadata, nalu.metadata)
}
end)
end
do_parse_access_units(rest, au_sizes, metadata, bump_timestamp(state), [buffers | acc])
end
defp update_metadata(%{timestamp: timestamp} = metadata, state) do
%{state | timestamp: timestamp, metadata: metadata}
end
defp update_metadata(metadata, state) do
%{state | metadata: metadata}
end
defp bump_timestamp(%{framerate: {0, _}} = state) do
state
end
defp bump_timestamp(state) do
use Ratio
%{timestamp: timestamp, framerate: {num, denom}} = state
timestamp = timestamp + Ratio.new(denom * Membrane.Time.second(), num)
%{state | timestamp: timestamp}
end
defp mk_caps(state, width, height) do
{:ok, profile} = Native.get_profile(state.parser_ref)
%H264{
width: width,
height: height,
framerate: state.framerate,
alignment: state.alignment,
nalu_in_metadata?: state.attach_nalus?,
stream_format: :byte_stream,
profile: profile
}
end
end
| 32.874552 | 105 | 0.616877 |
ff477288fe87d87709ada42be188c5023c5be945 | 12,141 | ex | Elixir | lib/mix/lib/mix/utils.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Utils do
@moduledoc """
Utilities used throughout Mix and tasks.
"""
@doc """
Get the mix home.
It defaults to `~/.mix` unless the `MIX_HOME`
environment variable is set.
Developers should only store entries in the
`MIX_HOME` directory which are guaranteed to
work across multiple Elixir versions, as it is
not recommended to swap the `MIX_HOME` directory
as configuration and other important data may be
stored there.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
@doc """
Get all paths defined in the MIX_PATH env variable.
`MIX_PATH` may contain multiple paths. If on Windows, those
paths should be separated by `;`, if on unix systems, use `:`.
"""
def mix_paths do
if path = System.get_env("MIX_PATH") do
String.split(path, path_separator)
else
[]
end
end
defp path_separator do
case :os.type do
{:win32, _} -> ";"
{:unix, _} -> ":"
end
end
@doc """
Take a `command` name and attempts to load a module
with the command name converted to a module name
in the given `at` scope.
Returns `{:module, module}` in case a module
exists and is loaded, `{:error, reason}` otherwise.
## Examples
iex> Mix.Utils.command_to_module("compile", Mix.Tasks)
{:module, Mix.Tasks.Compile}
"""
def command_to_module(command, at \\ Elixir) do
module = Module.concat(at, command_to_module_name(command))
Code.ensure_loaded(module)
end
@doc """
Returns `true` if any of the `sources` are stale
compared to the given `targets`.
"""
def stale?(sources, targets) do
Enum.any? stale_stream(sources, targets)
end
@doc """
Extract all stale `sources` compared to the given `targets`.
"""
def extract_stale(_sources, []), do: []
def extract_stale([], _targets), do: []
def extract_stale(sources, targets) do
stale_stream(sources, targets) |> Enum.to_list
end
defp stale_stream(sources, targets) do
modified_target = targets |> Enum.map(&last_modified(&1)) |> Enum.min
Stream.filter(sources, fn(source) ->
last_modified(source) > modified_target
end)
end
@doc """
Returns the date the given path was last modified.
If the path does not exist, it returns the unix epoch
(1970-01-01 00:00:00).
"""
def last_modified(path)
def last_modified({{_, _, _}, {_, _, _}} = timestamp) do
timestamp
end
def last_modified(path) do
now = :calendar.local_time
case File.stat(path) do
{:ok, %File.Stat{mtime: mtime}} when mtime > now ->
Mix.shell.error("warning: mtime (modified time) for \"#{path}\" was set to the future, resetting to now")
File.touch!(path, now)
mtime
{:ok, %File.Stat{mtime: mtime}} ->
mtime
{:error, _} ->
{{1970, 1, 1}, {0, 0, 0}}
end
end
@doc """
Extract files from a list of paths.
`exts_or_pattern` may be a list of extensions or a
`Path.wildcard/1` pattern.
If the path in `paths` is a file, it is included in
the return result. If it is a directory, it is searched
recursively for files with the given extensions or matching
the given patterns.
"""
def extract_files(paths, exts_or_pattern)
def extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
def extract_files(paths, pattern) do
Enum.flat_map(paths, fn path ->
if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}")
end) |> Enum.uniq
end
@doc """
Converts the given atom or binary to underscore format.
If an atom is given, it is assumed to be an Elixir module,
so it is converted to a binary and then processed.
## Examples
iex> Mix.Utils.underscore "FooBar"
"foo_bar"
iex> Mix.Utils.underscore "Foo.Bar"
"foo/bar"
iex> Mix.Utils.underscore Foo.Bar
"foo/bar"
In general, `underscore` can be thought of as the reverse of
`camelize`, however, in some cases formatting may be lost:
iex> Mix.Utils.underscore "SAPExample"
"sap_example"
iex> Mix.Utils.camelize "sap_example"
"SapExample"
"""
def underscore(atom) when is_atom(atom) do
"Elixir." <> rest = Atom.to_string(atom)
underscore(rest)
end
def underscore(""), do: ""
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not (t in ?A..?Z or t == ?.) do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
@doc """
Converts the given string to CamelCase format.
## Examples
iex> Mix.Utils.camelize "foo_bar"
"FooBar"
"""
@spec camelize(String.t) :: String.t
def camelize(string)
def camelize(""),
do: ""
def camelize(<<?_, t :: binary>>),
do: camelize(t)
def camelize(<<h, t :: binary>>),
do: <<to_upper_char(h)>> <> do_camelize(t)
defp do_camelize(<<?_, ?_, t :: binary>>),
do: do_camelize(<< ?_, t :: binary >>)
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z,
do: <<to_upper_char(h)>> <> do_camelize(t)
defp do_camelize(<<?_>>),
do: <<>>
defp do_camelize(<<?/, t :: binary>>),
do: <<?.>> <> camelize(t)
defp do_camelize(<<h, t :: binary>>),
do: <<h>> <> do_camelize(t)
defp do_camelize(<<>>),
do: <<>>
@doc """
Takes a module and converts it to a command.
The nesting argument can be given in order to remove
the nesting of a module.
## Examples
iex> Mix.Utils.module_name_to_command(Mix.Tasks.Compile, 2)
"compile"
iex> Mix.Utils.module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
"compile.elixir"
"""
def module_name_to_command(module, nesting \\ 0)
def module_name_to_command(module, nesting) when is_atom(module) do
module_name_to_command(inspect(module), nesting)
end
def module_name_to_command(module, nesting) do
t = Regex.split(~r/\./, to_string(module))
t |> Enum.drop(nesting) |> Enum.map(&underscore(&1)) |> Enum.join(".")
end
@doc """
Takes a command and converts it to the module name format.
## Examples
iex> Mix.Utils.command_to_module_name("compile.elixir")
"Compile.Elixir"
"""
def command_to_module_name(s) do
Regex.split(~r/\./, to_string(s)) |>
Enum.map(&camelize(&1)) |>
Enum.join(".")
end
defp to_upper_char(char) when char in ?a..?z, do: char - 32
defp to_upper_char(char), do: char
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
@doc """
Symlink directory `source` to `target` or copy it recursively
in case symlink fails.
Expect source and target to be absolute paths as it generates
a relative symlink.
"""
def symlink_or_copy(source, target) do
if File.exists?(source) do
# Relative symbolic links on windows are broken
link = case :os.type do
{:win32, _} -> source
_ -> make_relative_path(source, target)
end |> String.to_char_list
case :file.read_link(target) do
{:ok, ^link} ->
:ok
{:ok, _} ->
File.rm!(target)
do_symlink_or_copy(source, target, link)
{:error, :enoent} ->
do_symlink_or_copy(source, target, link)
{:error, _} ->
_ = File.rm_rf!(target)
do_symlink_or_copy(source, target, link)
end
else
{:error, :enoent}
end
end
defp do_symlink_or_copy(source, target, link) do
case :file.make_symlink(link, target) do
:ok -> :ok
{:error, _} -> {:ok, File.cp_r!(source, target)}
end
end
# Make a relative path between the two given paths.
# Expects both paths to be fully expanded.
defp make_relative_path(source, target) do
do_make_relative_path(Path.split(source), Path.split(target))
end
defp do_make_relative_path([h|t1], [h|t2]) do
do_make_relative_path(t1, t2)
end
defp do_make_relative_path(source, target) do
base = List.duplicate("..", max(length(target) - 1, 0))
Path.join(base ++ source)
end
@doc """
Opens and reads content from either a URL or a local filesystem path
and returns the contents as a `{:ok, binary}`, `:badpath` for invalid
paths or `{:local, message}` for local errors and `{:remote, message}`
for remote ones.
## Options
* `:sha512` - checks against the given sha512 checksum. Returns
`{:checksum, message}` in case it fails
"""
@spec read_path(String.t, Keyword.t) ::
{:ok, binary} | :badpath | {:remote, String.t} |
{:local, String.t} | {:checksum, String.t}
def read_path(path, opts \\ []) do
cond do
url?(path) ->
read_httpc(path) |> checksum(opts)
file?(path) ->
read_file(path) |> checksum(opts)
true ->
:badpath
end
end
@checksums [:sha512]
defp checksum({:ok, binary} = return, opts) do
Enum.find_value @checksums, return, fn hash ->
if (expected = Keyword.get(opts, hash)) &&
(actual = hexhash(binary, hash)) &&
expected != actual do
{:checksum, """
Data does not match the given sha512 checksum.
Expected: #{expected}
Actual: #{actual}
"""}
end
end
end
defp checksum({_, _} = error, _opts) do
error
end
defp hexhash(binary, hash) do
Base.encode16 :crypto.hash(hash, binary), case: :lower
end
@doc """
Prompts the user to overwrite the file if it exists. Returns
the user input.
"""
def can_write?(path) do
if File.exists?(path) do
full = Path.expand(path)
Mix.shell.yes?(Path.relative_to_cwd(full) <> " already exists, overwrite?")
else
true
end
end
defp read_file(path) do
try do
{:ok, File.read!(path)}
rescue
e in [File.Error] -> {:local, Exception.message(e)}
end
end
defp read_httpc(path) do
{:ok, _} = Application.ensure_all_started(:ssl)
{:ok, _} = Application.ensure_all_started(:inets)
# Starting a http client profile allows us to scope
# the effects of using a http proxy to this function
{:ok, _pid} = :inets.start(:httpc, [{:profile, :mix}])
headers = [{'user-agent', 'Mix/#{System.version}'}]
request = {:binary.bin_to_list(path), headers}
# If a proxy environment variable was supplied add a proxy to httpc
http_proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy")
https_proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy")
if http_proxy, do: proxy(:proxy, http_proxy)
if https_proxy, do: proxy(:https_proxy, https_proxy)
# We are using relaxed: true because some servers is returning a Location
# header with relative paths, which does not follow the spec. This would
# cause the request to fail with {:error, :no_scheme} unless :relaxed
# is given.
case :httpc.request(:get, request, [relaxed: true], [body_format: :binary], :mix) do
{:ok, {{_, status, _}, _, body}} when status in 200..299 ->
{:ok, body}
{:ok, {{_, status, _}, _, _}} ->
{:remote, "httpc request failed with: {:bad_status_code, #{status}}"}
{:error, reason} ->
{:remote, "httpc request failed with: #{inspect reason}"}
end
after
:inets.stop(:httpc, :mix)
end
defp file?(path) do
File.regular?(path)
end
defp url?(path) do
URI.parse(path).scheme in ["http", "https"]
end
defp proxy(proxy_scheme, proxy) do
uri = URI.parse(proxy)
if uri.host && uri.port do
host = String.to_char_list(uri.host)
:httpc.set_options([{proxy_scheme, {{host, uri.port}, []}}], :mix)
end
end
end
| 26.508734 | 113 | 0.623589 |
ff47d160cdce864a8fa131a312031c96462da8b9 | 1,413 | ex | Elixir | lib/pathex/parser.ex | sirikid/pathex | e0bfa4ab6e0b4af4da05479df842a0d07f685707 | [
"BSD-2-Clause"
] | null | null | null | lib/pathex/parser.ex | sirikid/pathex | e0bfa4ab6e0b4af4da05479df842a0d07f685707 | [
"BSD-2-Clause"
] | null | null | null | lib/pathex/parser.ex | sirikid/pathex | e0bfa4ab6e0b4af4da05479df842a0d07f685707 | [
"BSD-2-Clause"
] | null | null | null | defmodule Pathex.Parser do
@moduledoc """
Module for parsing path created with sigils
"""
# TODO proper naive parsing
@spec parse(binary(), Pathex.mod()) :: Pathex.Combination.t()
def parse(string, :naive) do
string
|> String.split("/")
|> Enum.map(&detect_naive(String.trim(&1)))
end
def parse(string, :json) do
string
|> String.split("/")
|> Enum.map(&detect_json(String.trim(&1)))
end
def parse(string, :map) do
string
|> String.split("/")
|> Enum.map(&detect_map(String.trim(&1)))
end
defp detect_map(str) do
astr = String.to_atom(str)
case Integer.parse(str) do
{istr, ""} ->
[map: str, map: istr, map: astr]
_ ->
[map: astr, map: str]
end
end
defp detect_json(str) do
case Integer.parse(str) do
{istr, ""} ->
[map: str, list: istr]
_ ->
[map: str]
end
end
defp detect_naive("\"" <> str) do
case String.trim_trailing(str, "\"") do
^str ->
raise "Bad string in naive mod: #{str}"
other ->
[map: other]
end
end
defp detect_naive(":" <> str) do
astr = String.to_atom(str)
[map: astr, keyword: astr]
end
defp detect_naive(str) do
case Integer.parse(str) do
{istr, ""} ->
[list: istr, map: istr, tuple: istr]
_ ->
raise "Bad string in naive mod: #{str}"
end
end
end
| 19.356164 | 63 | 0.55414 |
ff47d2383b2f5d4c1030ce37ee1f1b086b0b3cab | 549 | exs | Elixir | test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | test/absinthe/integration/validation/error_result_when_bad_list_argument_test.exs | TheRealReal/absinthe | 6eae5bc36283e58f42d032b8afd90de3ad64f97b | [
"MIT"
] | 564 | 2016-03-02T07:49:59.000Z | 2022-03-06T14:40:59.000Z | defmodule Elixir.Absinthe.Integration.Validation.ErrorResultWhenBadListArgumentTest do
use Absinthe.Case, async: true
@query """
query {
thing(id: ["foo"]) {
name
}
}
"""
test "scenario #1" do
assert {:ok,
%{
errors: [
%{
message: "Argument \"id\" has invalid value [\"foo\"].",
locations: [%{column: 9, line: 2}]
}
]
}} == Absinthe.run(@query, Absinthe.Fixtures.Things.MacroSchema, [])
end
end
| 22.875 | 86 | 0.48816 |
ff47de4fca42124519d98e0fd8b405d6d655188f | 16,903 | exs | Elixir | test/empty/accounts_test.exs | manojsamanta/empty-app-with-auth | e601a7c6ef1b4a992758117e6bf0922a5ce08f04 | [
"MIT"
] | null | null | null | test/empty/accounts_test.exs | manojsamanta/empty-app-with-auth | e601a7c6ef1b4a992758117e6bf0922a5ce08f04 | [
"MIT"
] | null | null | null | test/empty/accounts_test.exs | manojsamanta/empty-app-with-auth | e601a7c6ef1b4a992758117e6bf0922a5ce08f04 | [
"MIT"
] | null | null | null | defmodule Empty.AccountsTest do
  use Empty.DataCase
  alias Empty.Accounts
  import Empty.AccountsFixtures
  alias Empty.Accounts.{User, UserToken}
  describe "get_user_by_email/1" do
    test "does not return the user if the email does not exist" do
      refute Accounts.get_user_by_email("unknown@example.com")
    end
    test "returns the user if the email exists" do
      %{id: id} = user = user_fixture()
      assert %User{id: ^id} = Accounts.get_user_by_email(user.email)
    end
  end
  # Unlike stock phx.gen.auth, failed lookups return tagged error tuples
  # (:bad_username_or_password / :not_confirmed) rather than nil.
  describe "get_user_by_email_and_password/2" do
    test "does not return the user if the email does not exist" do
      assert {:error, :bad_username_or_password} ==
               Accounts.get_user_by_email_and_password("unknown@example.com", "hello world!")
    end
    test "does not return the user if the password is not valid" do
      user = user_fixture()
      assert {:error, :bad_username_or_password} ==
               Accounts.get_user_by_email_and_password(user.email, "invalid")
    end
    test "returns the user if the email and password are valid" do
      %{id: id} = user = user_fixture()
      assert {:ok, %User{id: ^id}} =
               Accounts.get_user_by_email_and_password(user.email, valid_user_password())
    end
    test "does not return the user if their account has not been confirmed" do
      user = user_fixture(%{}, confirmed: false)
      assert {:error, :not_confirmed} ==
               Accounts.get_user_by_email_and_password(user.email, valid_user_password())
    end
  end
  describe "get_user!/1" do
    test "raises if id is invalid" do
      assert_raise Ecto.NoResultsError, fn ->
        Accounts.get_user!(-1)
      end
    end
    test "returns the user with the given id" do
      %{id: id} = user = user_fixture()
      assert %User{id: ^id} = Accounts.get_user!(user.id)
    end
  end
  describe "register_user/1" do
    test "requires email and password to be set" do
      {:error, changeset} = Accounts.register_user(%{})
      assert %{
               password: ["can't be blank"],
               email: ["can't be blank"]
             } = errors_on(changeset)
    end
    test "validates email and password when given" do
      {:error, changeset} = Accounts.register_user(%{
        email: "not valid",
        password: "not valid",
        password_confirmation: "not matching"
      })
      assert %{
               email: ["must have the @ sign and no spaces"],
               password: ["should be at least 12 character(s)"],
               password_confirmation: ["does not match password"]
             } = errors_on(changeset)
    end
    test "validates maximum values for email and password for security" do
      too_long = String.duplicate("db", 100)
      {:error, changeset} = Accounts.register_user(%{email: too_long, password: too_long})
      assert "should be at most 160 character(s)" in errors_on(changeset).email
      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end
    test "validates email uniqueness" do
      %{email: email} = user_fixture()
      {:error, changeset} = Accounts.register_user(%{email: email})
      assert "has already been taken" in errors_on(changeset).email
      # Now try with the upper cased email too, to check that email case is ignored.
      {:error, changeset} = Accounts.register_user(%{email: String.upcase(email)})
      assert "has already been taken" in errors_on(changeset).email
    end
    test "registers users with a hashed password" do
      email = unique_user_email()
      {:ok, user} = Accounts.register_user(%{
        email: email,
        password: valid_user_password(),
        password_confirmation: valid_user_password()
      })
      assert user.email == email
      # The plain password is never persisted; only its hash is kept.
      assert is_binary(user.hashed_password)
      assert is_nil(user.confirmed_at)
      assert is_nil(user.password)
    end
  end
  describe "change_user_registration/2" do
    test "returns a changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_registration(%User{})
      assert changeset.required == [:password, :email]
    end
  end
  describe "change_user_email/2" do
    test "returns a user changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_email(%User{})
      assert changeset.required == [:email]
    end
  end
  describe "apply_user_email/3" do
    setup do
      %{user: user_fixture()}
    end
    test "requires email to change", %{user: user} do
      {:error, changeset} = Accounts.apply_user_email(user, valid_user_password(), %{})
      assert %{email: ["did not change"]} = errors_on(changeset)
    end
    test "validates email", %{user: user} do
      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: "not valid"})
      assert %{email: ["must have the @ sign and no spaces"]} = errors_on(changeset)
    end
    test "validates maximum value for email for security", %{user: user} do
      too_long = String.duplicate("db", 100)
      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: too_long})
      assert "should be at most 160 character(s)" in errors_on(changeset).email
    end
    test "validates email uniqueness", %{user: user} do
      %{email: email} = user_fixture()
      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: email})
      assert "has already been taken" in errors_on(changeset).email
    end
    test "validates current password", %{user: user} do
      {:error, changeset} =
        Accounts.apply_user_email(user, "invalid", %{email: unique_user_email()})
      assert %{current_password: ["is not valid"]} = errors_on(changeset)
    end
    # apply_user_email/3 only validates; persisting happens in update_user_email/2.
    test "applies the email without persisting it", %{user: user} do
      email = unique_user_email()
      {:ok, user} = Accounts.apply_user_email(user, valid_user_password(), %{email: email})
      assert user.email == email
      assert Accounts.get_user!(user.id).email != email
    end
  end
  describe "deliver_update_email_instructions/3" do
    setup do
      %{user: user_fixture()}
    end
    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_update_email_instructions(user, "current@example.com", url)
        end)
      # Only the sha256 hash of the emailed token is stored in the DB.
      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "change:current@example.com"
    end
  end
  describe "update_user_email/2" do
    setup do
      user = user_fixture()
      email = unique_user_email()
      token =
        extract_user_token(fn url ->
          Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
        end)
      %{user: user, token: token, email: email}
    end
    test "updates the email with a valid token", %{user: user, token: token, email: email} do
      assert Accounts.update_user_email(user, token) == :ok
      changed_user = Repo.get!(User, user.id)
      assert changed_user.email != user.email
      assert changed_user.email == email
      assert changed_user.confirmed_at
      assert changed_user.confirmed_at != user.confirmed_at
      refute Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not update email with invalid token", %{user: user} do
      assert Accounts.update_user_email(user, "oops") == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not update email if user email changed", %{user: user, token: token} do
      assert Accounts.update_user_email(%{user | email: "current@example.com"}, token) == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not update email if token expired", %{user: user, token: token} do
      # Simulate expiry by backdating the token's inserted_at.
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      assert Accounts.update_user_email(user, token) == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end
  describe "change_user_password/2" do
    test "returns a user changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_password(%User{})
      assert changeset.required == [:password]
    end
  end
  describe "update_user_password/3" do
    setup do
      %{user: user_fixture()}
    end
    test "validates password", %{user: user} do
      {:error, changeset} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "not valid",
          password_confirmation: "another"
        })
      assert %{
               password: ["should be at least 12 character(s)"],
               password_confirmation: ["does not match password"]
             } = errors_on(changeset)
    end
    test "validates maximum values for password for security", %{user: user} do
      too_long = String.duplicate("db", 100)
      {:error, changeset} =
        Accounts.update_user_password(user, valid_user_password(), %{password: too_long})
      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end
    test "validates current password", %{user: user} do
      {:error, changeset} =
        Accounts.update_user_password(user, "invalid", %{password: valid_user_password()})
      assert %{current_password: ["is not valid"]} = errors_on(changeset)
    end
    test "updates the password", %{user: user} do
      {:ok, user} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "new valid password"
        })
      assert is_nil(user.password)
      assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
    end
    # Changing the password invalidates every outstanding session/token.
    test "deletes all tokens for the given user", %{user: user} do
      _ = Accounts.generate_user_session_token(user)
      {:ok, _} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "new valid password"
        })
      refute Repo.get_by(UserToken, user_id: user.id)
    end
  end
  describe "generate_user_session_token/1" do
    setup do
      %{user: user_fixture()}
    end
    test "generates a token", %{user: user} do
      token = Accounts.generate_user_session_token(user)
      assert user_token = Repo.get_by(UserToken, token: token)
      assert user_token.context == "session"
      # Creating the same token for another user should fail
      assert_raise Ecto.ConstraintError, fn ->
        Repo.insert!(%UserToken{
          token: user_token.token,
          user_id: user_fixture().id,
          context: "session"
        })
      end
    end
  end
  describe "get_user_by_session_token/1" do
    setup do
      user = user_fixture()
      token = Accounts.generate_user_session_token(user)
      %{user: user, token: token}
    end
    test "returns user by token", %{user: user, token: token} do
      assert session_user = Accounts.get_user_by_session_token(token)
      assert session_user.id == user.id
    end
    test "does not return user for invalid token" do
      refute Accounts.get_user_by_session_token("oops")
    end
    test "does not return user for expired token", %{token: token} do
      # Simulate expiry by backdating the token's inserted_at.
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      refute Accounts.get_user_by_session_token(token)
    end
  end
  describe "delete_session_token/1" do
    test "deletes the token" do
      user = user_fixture()
      token = Accounts.generate_user_session_token(user)
      assert Accounts.delete_session_token(token) == :ok
      refute Accounts.get_user_by_session_token(token)
    end
  end
  describe "deliver_user_confirmation_instructions/2" do
    setup do
      %{user: user_fixture()}
    end
    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_confirmation_instructions(user, url)
        end)
      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "confirm"
    end
  end
  describe "confirm_user/2" do
    setup do
      # Confirmation only makes sense for a not-yet-confirmed user.
      user = user_fixture( %{}, confirmed: false)
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_confirmation_instructions(user, url)
        end)
      %{user: user, token: token}
    end
    test "confirms the email with a valid token", %{user: user, token: token} do
      assert {:ok, confirmed_user} = Accounts.confirm_user(token)
      assert confirmed_user.confirmed_at
      assert confirmed_user.confirmed_at != user.confirmed_at
      assert Repo.get!(User, user.id).confirmed_at
      refute Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not confirm with invalid token", %{user: user} do
      assert Accounts.confirm_user("oops") == :error
      refute Repo.get!(User, user.id).confirmed_at
      assert Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not confirm email if token expired", %{user: user, token: token} do
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      assert Accounts.confirm_user(token) == :error
      refute Repo.get!(User, user.id).confirmed_at
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end
  describe "deliver_user_reset_password_instructions/2" do
    setup do
      %{user: user_fixture()}
    end
    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_reset_password_instructions(user, url)
        end)
      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "reset_password"
    end
  end
  describe "get_user_by_reset_password_token/1" do
    setup do
      user = user_fixture()
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_reset_password_instructions(user, url)
        end)
      %{user: user, token: token}
    end
    test "returns the user with valid token", %{user: %{id: id}, token: token} do
      assert %User{id: ^id} = Accounts.get_user_by_reset_password_token(token)
      assert Repo.get_by(UserToken, user_id: id)
    end
    test "does not return the user with invalid token", %{user: user} do
      refute Accounts.get_user_by_reset_password_token("oops")
      assert Repo.get_by(UserToken, user_id: user.id)
    end
    test "does not return the user if token expired", %{user: user, token: token} do
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      refute Accounts.get_user_by_reset_password_token(token)
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end
  describe "reset_user_password/2" do
    setup do
      %{user: user_fixture()}
    end
    test "validates password", %{user: user} do
      {:error, changeset} =
        Accounts.reset_user_password(user, %{
          password: "not valid",
          password_confirmation: "another"
        })
      assert %{
               password: ["should be at least 12 character(s)"],
               password_confirmation: ["does not match password"]
             } = errors_on(changeset)
    end
    test "validates maximum values for password for security", %{user: user} do
      too_long = String.duplicate("db", 100)
      {:error, changeset} = Accounts.reset_user_password(user, %{password: too_long})
      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end
    test "updates the password", %{user: user} do
      {:ok, updated_user} = Accounts.reset_user_password(user, %{password: "new valid password"})
      assert is_nil(updated_user.password)
      assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
    end
    test "deletes all tokens for the given user", %{user: user} do
      _ = Accounts.generate_user_session_token(user)
      {:ok, _} = Accounts.reset_user_password(user, %{password: "new valid password"})
      refute Repo.get_by(UserToken, user_id: user.id)
    end
  end
  describe "inspect/2" do
    # The User struct's Inspect implementation must redact the password.
    test "does not include password" do
      refute inspect(%User{password: "123456"}) =~ "password: \"123456\""
    end
  end
end
| 33.873747 | 97 | 0.657339 |
ff47f734553e1773507be1c1f837f9f6f5e60943 | 2,062 | ex | Elixir | clients/container/lib/google_api/container/v1/model/time_window.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/container/lib/google_api/container/v1/model/time_window.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/container/lib/google_api/container/v1/model/time_window.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1.Model.TimeWindow do
  @moduledoc """
  Represents an arbitrary window of time.
  ## Attributes
  *   `endTime` (*type:* `DateTime.t`, *default:* `nil`) - The time that the window ends. The end time should take place after the start time.
  *   `maintenanceExclusionOptions` (*type:* `GoogleApi.Container.V1.Model.MaintenanceExclusionOptions.t`, *default:* `nil`) - MaintenanceExclusionOptions provides maintenance exclusion related options.
  *   `startTime` (*type:* `DateTime.t`, *default:* `nil`) - The time that the window first starts.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :endTime => DateTime.t() | nil,
          :maintenanceExclusionOptions =>
            GoogleApi.Container.V1.Model.MaintenanceExclusionOptions.t() | nil,
          :startTime => DateTime.t() | nil
        }
  # Generated field mappings; JSON (de)serialization is provided by
  # GoogleApi.Gax.ModelBase. This file is auto generated — do not edit by hand.
  field(:endTime, as: DateTime)
  field(:maintenanceExclusionOptions, as: GoogleApi.Container.V1.Model.MaintenanceExclusionOptions)
  field(:startTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.TimeWindow do
  # Delegate decoding to the generated model module.
  def decode(value, options), do: GoogleApi.Container.V1.Model.TimeWindow.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.TimeWindow do
  # Encoding is handled generically by GoogleApi.Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.821429 | 202 | 0.731329 |
ff4801bb95d2c62978f50d2d2e161f3fbd02c7e3 | 900 | ex | Elixir | lib/nerves_hub_link_common/message/update_info.ex | nerves-hub/nerves_hub_link_common | c8a00a6e8ebafdb1bd3f9bf89e053c751d6bf430 | [
"Apache-2.0"
] | 1 | 2021-03-15T02:26:05.000Z | 2021-03-15T02:26:05.000Z | lib/nerves_hub_link_common/message/update_info.ex | nerves-hub/nerves_hub_link_common | c8a00a6e8ebafdb1bd3f9bf89e053c751d6bf430 | [
"Apache-2.0"
] | 5 | 2021-02-11T16:18:14.000Z | 2022-03-01T15:12:12.000Z | lib/nerves_hub_link_common/message/update_info.ex | nerves-hub/nerves_hub_link_common | c8a00a6e8ebafdb1bd3f9bf89e053c751d6bf430 | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubLinkCommon.Message.UpdateInfo do
  @moduledoc """
  Describes a firmware update offered to a device: the URL to fetch the
  firmware from and its parsed metadata.
  """
  alias NervesHubLinkCommon.Message.FirmwareMetadata
  defstruct [:firmware_url, :firmware_meta]
  @typedoc """
  Payload that gets dispatched down to devices upon an update.
  `firmware_url` is the location of the firmware and `firmware_meta`
  its parsed `FirmwareMetadata` struct.
  """
  @type t() :: %__MODULE__{
          firmware_url: URI.t(),
          firmware_meta: FirmwareMetadata.t()
        }
@doc "Parse an update message from NervesHub"
@spec parse(map()) :: {:ok, t()} | {:error, :invalid_params}
def parse(%{"firmware_meta" => %{} = meta, "firmware_url" => url}) do
with {:ok, firmware_meta} <- FirmwareMetadata.parse(meta) do
{:ok,
%__MODULE__{
firmware_url: URI.parse(url),
firmware_meta: firmware_meta
}}
end
end
def parse(_), do: {:error, :invalid_params}
end
| 26.470588 | 71 | 0.647778 |
ff48055bcd6823325a84cb9748131e3862369aff | 2,471 | ex | Elixir | clients/monitoring/lib/google_api/monitoring/v3/model/type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/monitoring/lib/google_api/monitoring/v3/model/type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/monitoring/lib/google_api/monitoring/v3/model/type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Monitoring.V3.Model.Type do
  @moduledoc """
  A protocol buffer message type.
  ## Attributes
  *   `fields` (*type:* `list(GoogleApi.Monitoring.V3.Model.Field.t)`, *default:* `nil`) - The list of fields.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The fully qualified message name.
  *   `oneofs` (*type:* `list(String.t)`, *default:* `nil`) - The list of types appearing in oneof definitions in this type.
  *   `options` (*type:* `list(GoogleApi.Monitoring.V3.Model.Option.t)`, *default:* `nil`) - The protocol buffer options.
  *   `sourceContext` (*type:* `GoogleApi.Monitoring.V3.Model.SourceContext.t`, *default:* `nil`) - The source context.
  *   `syntax` (*type:* `String.t`, *default:* `nil`) - The source syntax.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :fields => list(GoogleApi.Monitoring.V3.Model.Field.t()),
          :name => String.t(),
          :oneofs => list(String.t()),
          :options => list(GoogleApi.Monitoring.V3.Model.Option.t()),
          :sourceContext => GoogleApi.Monitoring.V3.Model.SourceContext.t(),
          :syntax => String.t()
        }
  # Generated field mappings; JSON (de)serialization is provided by
  # GoogleApi.Gax.ModelBase. This file is auto generated — do not edit by hand.
  field(:fields, as: GoogleApi.Monitoring.V3.Model.Field, type: :list)
  field(:name)
  field(:oneofs, type: :list)
  field(:options, as: GoogleApi.Monitoring.V3.Model.Option, type: :list)
  field(:sourceContext, as: GoogleApi.Monitoring.V3.Model.SourceContext)
  field(:syntax)
end
defimpl Poison.Decoder, for: GoogleApi.Monitoring.V3.Model.Type do
  # Delegate decoding to the generated model module.
  def decode(value, options), do: GoogleApi.Monitoring.V3.Model.Type.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Monitoring.V3.Model.Type do
  # Encoding is handled generically by GoogleApi.Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.854839 | 124 | 0.695265 |
ff480b703bcfbc6faf3f017f75b4ca805a7e1994 | 2,351 | exs | Elixir | mix.exs | petermm/littlechat | b8672165ab5e6efd0d501f291de682a40b37a7b7 | [
"MIT"
] | 166 | 2020-07-15T14:47:19.000Z | 2022-03-25T03:57:35.000Z | mix.exs | Jurshsmith/littlechat | 50fac2f907abbfcd574d31b4d4bdad7e51302da7 | [
"MIT"
] | 12 | 2020-07-01T23:32:47.000Z | 2021-03-18T21:21:28.000Z | mix.exs | Jurshsmith/littlechat | 50fac2f907abbfcd574d31b4d4bdad7e51302da7 | [
"MIT"
] | 21 | 2020-07-15T14:59:39.000Z | 2022-03-20T21:05:16.000Z | defmodule Littlechat.MixProject do
use Mix.Project
  # Mix project definition: app name, language version, compilers,
  # dependency/alias hooks and release settings.
  def project do
    [
      app: :littlechat,
      version: "0.5.0",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      # `mix release` settings: self-contained unix release bundling ERTS,
      # with runtime_tools kept permanent for remote introspection.
      releases: [
        littlechat: [
          include_erts: true,
          include_executables_for: [:unix],
          applications: [
            runtime_tools: :permanent
          ]
        ]
      ]
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      # Littlechat.Application is started as the supervision-tree root.
      mod: {Littlechat.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.5.4"},
      {:phoenix_ecto, "~> 4.2"},
      {:ecto_sql, "~> 3.4"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_live_view, "~> 0.14.4"},
      {:floki, ">= 0.0.0", only: :test},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.18"},
      {:jason, "~> 1.1"},
      {:plug_cowboy, "~> 2.0"},
      {:uuid, "~> 1.1"},
      {:distillery, "~> 2.0"},
      # stun: STUN protocol support (NAT traversal), presumably for WebRTC.
      {:stun, "~> 1.0"}, # See https://github.com/processone/ejabberd/issues/1107#issuecomment-217828211 if you have errors installing this on macOS.
      # NOTE(review): sentry is pinned to an 8.0 release candidate — revisit
      # once 8.0 final is available.
      {:sentry, "~> 8.0-rc.2"},
      {:hackney, "~> 1.8"}
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # Tests always run against a freshly created and migrated database.
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end
| 29.024691 | 149 | 0.562739 |
ff482fe1a1a599ef028912541949fa49092c17f0 | 1,113 | exs | Elixir | 2016/day08/config/config.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day08/config/config.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day08/config/config.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `Mix.Config` is deprecated since Elixir v1.9 in favour of
# `import Config`; keep `use Mix.Config` only while supporting older Elixir.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
#     config :day08, key: :value
#
# And access this configuration in your application as:
#
#     Application.get_env(:day08, :key)
#
# Or configure a 3rd-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
| 35.903226 | 73 | 0.750225 |
ff484945b46070908abf91ddc6bd7b609872b286 | 1,623 | ex | Elixir | lib/bank_web/endpoint.ex | oPauloChaves/elixir-checking-account-api | 1fb5d241b40ecbaf11bb4fcbdb5ccabafd635acd | [
"MIT"
] | 3 | 2017-10-16T14:35:46.000Z | 2017-12-13T22:35:43.000Z | lib/bank_web/endpoint.ex | oPauloChaves/elixir-checking-account-api | 1fb5d241b40ecbaf11bb4fcbdb5ccabafd635acd | [
"MIT"
] | null | null | null | lib/bank_web/endpoint.ex | oPauloChaves/elixir-checking-account-api | 1fb5d241b40ecbaf11bb4fcbdb5ccabafd635acd | [
"MIT"
] | null | null | null | defmodule BankWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :bank
  alias Bank.Bucket.Registry
  # WebSocket entry point for Phoenix channels.
  socket "/socket", BankWeb.UserSocket
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/", from: :bank, gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
  end
  plug Plug.RequestId
  plug Plug.Logger
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  plug Plug.MethodOverride
  plug Plug.Head
  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  plug Plug.Session,
    store: :cookie,
    key: "_bank_key",
    signing_salt: "oT3daavo"
  # All remaining requests are dispatched to the router.
  plug BankWeb.Router
  @doc """
  Callback invoked for dynamically configuring the endpoint.
  It receives the endpoint configuration and checks if
  configuration should be loaded from the system environment.
  """
  def init(_key, config) do
    # Create the "operations" bucket before the endpoint starts serving.
    # NOTE(review): a side effect in init/2 is unusual — confirm this
    # belongs here rather than in the application supervision tree.
    Registry.create(Registry, "operations")
    if config[:load_from_system_env] do
      # In releases the HTTP port comes from the PORT environment variable.
      port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
      {:ok, Keyword.put(config, :http, [:inet6, port: port])}
    else
      {:ok, config}
    end
  end
end
| 27.05 | 95 | 0.701171 |
ff4862e23115181ca5ac8e49ac14dafa342ba189 | 489 | ex | Elixir | lib/rasa_api/model/tracker_active_form.ex | whitedr/rasa-api-elixir | 3ff932ee4cb4d04f0dd5ea66ec4b8f83195a003a | [
"Apache-2.0"
] | null | null | null | lib/rasa_api/model/tracker_active_form.ex | whitedr/rasa-api-elixir | 3ff932ee4cb4d04f0dd5ea66ec4b8f83195a003a | [
"Apache-2.0"
] | null | null | null | lib/rasa_api/model/tracker_active_form.ex | whitedr/rasa-api-elixir | 3ff932ee4cb4d04f0dd5ea66ec4b8f83195a003a | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule RasaApi.Model.TrackerActiveForm do
  @moduledoc """
  Name of the active form
  """
  @derive [Poison.Encoder]
  defstruct [
    # name of the currently active form, when any (nilable per the type below)
    :name
  ]
  @type t :: %__MODULE__{
          name: String.t | nil
        }
end
defimpl Poison.Decoder, for: RasaApi.Model.TrackerActiveForm do
  # The struct holds only a plain string, so decoding is the identity.
  def decode(value, _options), do: value
end
| 18.807692 | 91 | 0.699387 |
ff48b98285943567468e314d97ad12ac79ebc2ea | 493 | exs | Elixir | test/elm_phoenix_web_socket_example_web/views/error_view_test.exs | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | null | null | null | test/elm_phoenix_web_socket_example_web/views/error_view_test.exs | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | 2 | 2020-12-29T15:13:39.000Z | 2020-12-30T01:01:02.000Z | test/elm_phoenix_web_socket_example_web/views/error_view_test.exs | phollyer/elm-phoenix-websocket-example | 147da038b5ca4f9304924124c546284f12ecfaa8 | [
"BSD-3-Clause"
] | null | null | null | defmodule ElmPhoenixWebSocketExampleWeb.ErrorViewTest do
  use ElmPhoenixWebSocketExampleWeb.ConnCase, async: true
  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View
  # Rendering the error templates yields the bare status text.
  test "renders 404.html" do
    assert render_to_string(ElmPhoenixWebSocketExampleWeb.ErrorView, "404.html", []) == "Not Found"
  end
  test "renders 500.html" do
    assert render_to_string(ElmPhoenixWebSocketExampleWeb.ErrorView, "500.html", []) == "Internal Server Error"
  end
end
| 32.866667 | 111 | 0.774848 |
ff48be1329b157116dc35cb5f2f0698ea9586708 | 751 | ex | Elixir | lib/dispatch/supervisor.ex | bjuretic/dispatch | e0c81efe69c0609390c14ddbc7762b6b7c9ba828 | [
"MIT"
] | null | null | null | lib/dispatch/supervisor.ex | bjuretic/dispatch | e0c81efe69c0609390c14ddbc7762b6b7c9ba828 | [
"MIT"
] | null | null | null | lib/dispatch/supervisor.ex | bjuretic/dispatch | e0c81efe69c0609390c14ddbc7762b6b7c9ba828 | [
"MIT"
] | null | null | null | defmodule Dispatch.Supervisor do
  use Supervisor
  # Starts the supervisor registered under the module name.
  # NOTE(review): the opts argument is currently ignored.
  def start_link(_opts \\ []) do
    Supervisor.start_link(__MODULE__, :ok, [name: __MODULE__])
  end
  # Builds the child specs: the configured PubSub adapter (PG2 by default),
  # the Dispatch registry and hash-ring server, and a task supervisor.
  # NOTE(review): worker/2, supervisor/2 and supervise/2 come from
  # Supervisor.Spec, deprecated since Elixir 1.5 — consider child-spec tuples.
  def init(:ok) do
    # Registry options come from app config, with a default process name.
    registry =
      Application.get_env(:dispatch, :registry, [])
      |> Keyword.put_new(:name, Dispatch.Registry)
    pubsub = Application.get_env(:dispatch, :pubsub, [])
    children = [
      supervisor(pubsub[:adapter] || Phoenix.PubSub.PG2,
        [pubsub[:name] || Phoenix.PubSub.Test.PubSub,
         pubsub[:opts] || []]),
      worker(Dispatch.Registry, [registry]),
      worker(Dispatch.HashRingServer, [registry]),
      supervisor(Task.Supervisor, [[name: TaskSupervisor]])
    ]
    # :rest_for_one — a crashed child restarts together with every child
    # started after it (the registry consumers depend on PubSub).
    supervise(children, strategy: :rest_for_one)
  end
end
| 26.821429 | 62 | 0.635153 |
ff48c4c826966d4e3c6c8ebd4e44257bd73988c2 | 18,994 | exs | Elixir | test/imagineer/image/png/pngsuite/background/no_background_test.exs | ndemonner/imagineer | a6872296756cde19f8f575a7d1854d0fe7cbcb02 | [
"MIT"
] | 103 | 2015-01-28T19:09:42.000Z | 2018-10-22T15:05:46.000Z | test/imagineer/image/png/pngsuite/background/no_background_test.exs | tyre/imagineer | a6872296756cde19f8f575a7d1854d0fe7cbcb02 | [
"MIT"
] | 12 | 2015-07-11T05:12:41.000Z | 2018-07-21T04:42:47.000Z | test/imagineer/image/png/pngsuite/background/no_background_test.exs | ndemonner/imagineer | a6872296756cde19f8f575a7d1854d0fe7cbcb02 | [
"MIT"
] | 18 | 2015-06-16T15:48:34.000Z | 2018-06-22T02:34:40.000Z | defmodule Imagineer.Image.PNG.PngSuite.Background.NoBackgroundTest do
use ExUnit.Case, async: true
@test_path "test/support/images/pngsuite/background/"
@actual_pixels [
[
{255, 0},
{255, 8},
{255, 16},
{255, 24},
{255, 32},
{255, 41},
{255, 49},
{255, 57},
{255, 65},
{255, 74},
{255, 82},
{255, 90},
{255, 98},
{255, 106},
{255, 115},
{255, 123},
{255, 131},
{255, 139},
{255, 148},
{255, 156},
{255, 164},
{255, 172},
{255, 180},
{255, 189},
{255, 197},
{255, 205},
{255, 213},
{255, 222},
{255, 230},
{255, 238},
{255, 246},
{255, 255}
],
[
{246, 0},
{246, 8},
{246, 16},
{246, 24},
{246, 32},
{246, 41},
{246, 49},
{246, 57},
{246, 65},
{246, 74},
{246, 82},
{246, 90},
{246, 98},
{246, 106},
{246, 115},
{246, 123},
{246, 131},
{246, 139},
{246, 148},
{246, 156},
{246, 164},
{246, 172},
{246, 180},
{246, 189},
{246, 197},
{246, 205},
{246, 213},
{246, 222},
{246, 230},
{246, 238},
{246, 246},
{246, 255}
],
[
{238, 0},
{238, 8},
{238, 16},
{238, 24},
{238, 32},
{238, 41},
{238, 49},
{238, 57},
{238, 65},
{238, 74},
{238, 82},
{238, 90},
{238, 98},
{238, 106},
{238, 115},
{238, 123},
{238, 131},
{238, 139},
{238, 148},
{238, 156},
{238, 164},
{238, 172},
{238, 180},
{238, 189},
{238, 197},
{238, 205},
{238, 213},
{238, 222},
{238, 230},
{238, 238},
{238, 246},
{238, 255}
],
[
{230, 0},
{230, 8},
{230, 16},
{230, 24},
{230, 32},
{230, 41},
{230, 49},
{230, 57},
{230, 65},
{230, 74},
{230, 82},
{230, 90},
{230, 98},
{230, 106},
{230, 115},
{230, 123},
{230, 131},
{230, 139},
{230, 148},
{230, 156},
{230, 164},
{230, 172},
{230, 180},
{230, 189},
{230, 197},
{230, 205},
{230, 213},
{230, 222},
{230, 230},
{230, 238},
{230, 246},
{230, 255}
],
[
{222, 0},
{222, 8},
{222, 16},
{222, 24},
{222, 32},
{222, 41},
{222, 49},
{222, 57},
{222, 65},
{222, 74},
{222, 82},
{222, 90},
{222, 98},
{222, 106},
{222, 115},
{222, 123},
{222, 131},
{222, 139},
{222, 148},
{222, 156},
{222, 164},
{222, 172},
{222, 180},
{222, 189},
{222, 197},
{222, 205},
{222, 213},
{222, 222},
{222, 230},
{222, 238},
{222, 246},
{222, 255}
],
[
{213, 0},
{213, 8},
{213, 16},
{213, 24},
{213, 32},
{213, 41},
{213, 49},
{213, 57},
{213, 65},
{213, 74},
{213, 82},
{213, 90},
{213, 98},
{213, 106},
{213, 115},
{213, 123},
{213, 131},
{213, 139},
{213, 148},
{213, 156},
{213, 164},
{213, 172},
{213, 180},
{213, 189},
{213, 197},
{213, 205},
{213, 213},
{213, 222},
{213, 230},
{213, 238},
{213, 246},
{213, 255}
],
[
{205, 0},
{205, 8},
{205, 16},
{205, 24},
{205, 32},
{205, 41},
{205, 49},
{205, 57},
{205, 65},
{205, 74},
{205, 82},
{205, 90},
{205, 98},
{205, 106},
{205, 115},
{205, 123},
{205, 131},
{205, 139},
{205, 148},
{205, 156},
{205, 164},
{205, 172},
{205, 180},
{205, 189},
{205, 197},
{205, 205},
{205, 213},
{205, 222},
{205, 230},
{205, 238},
{205, 246},
{205, 255}
],
[
{197, 0},
{197, 8},
{197, 16},
{197, 24},
{197, 32},
{197, 41},
{197, 49},
{197, 57},
{197, 65},
{197, 74},
{197, 82},
{197, 90},
{197, 98},
{197, 106},
{197, 115},
{197, 123},
{197, 131},
{197, 139},
{197, 148},
{197, 156},
{197, 164},
{197, 172},
{197, 180},
{197, 189},
{197, 197},
{197, 205},
{197, 213},
{197, 222},
{197, 230},
{197, 238},
{197, 246},
{197, 255}
],
[
{189, 0},
{189, 8},
{189, 16},
{189, 24},
{189, 32},
{189, 41},
{189, 49},
{189, 57},
{189, 65},
{189, 74},
{189, 82},
{189, 90},
{189, 98},
{189, 106},
{189, 115},
{189, 123},
{189, 131},
{189, 139},
{189, 148},
{189, 156},
{189, 164},
{189, 172},
{189, 180},
{189, 189},
{189, 197},
{189, 205},
{189, 213},
{189, 222},
{189, 230},
{189, 238},
{189, 246},
{189, 255}
],
[
{180, 0},
{180, 8},
{180, 16},
{180, 24},
{180, 32},
{180, 41},
{180, 49},
{180, 57},
{180, 65},
{180, 74},
{180, 82},
{180, 90},
{180, 98},
{180, 106},
{180, 115},
{180, 123},
{180, 131},
{180, 139},
{180, 148},
{180, 156},
{180, 164},
{180, 172},
{180, 180},
{180, 189},
{180, 197},
{180, 205},
{180, 213},
{180, 222},
{180, 230},
{180, 238},
{180, 246},
{180, 255}
],
[
{172, 0},
{172, 8},
{172, 16},
{172, 24},
{172, 32},
{172, 41},
{172, 49},
{172, 57},
{172, 65},
{172, 74},
{172, 82},
{172, 90},
{172, 98},
{172, 106},
{172, 115},
{172, 123},
{172, 131},
{172, 139},
{172, 148},
{172, 156},
{172, 164},
{172, 172},
{172, 180},
{172, 189},
{172, 197},
{172, 205},
{172, 213},
{172, 222},
{172, 230},
{172, 238},
{172, 246},
{172, 255}
],
[
{164, 0},
{164, 8},
{164, 16},
{164, 24},
{164, 32},
{164, 41},
{164, 49},
{164, 57},
{164, 65},
{164, 74},
{164, 82},
{164, 90},
{164, 98},
{164, 106},
{164, 115},
{164, 123},
{164, 131},
{164, 139},
{164, 148},
{164, 156},
{164, 164},
{164, 172},
{164, 180},
{164, 189},
{164, 197},
{164, 205},
{164, 213},
{164, 222},
{164, 230},
{164, 238},
{164, 246},
{164, 255}
],
[
{156, 0},
{156, 8},
{156, 16},
{156, 24},
{156, 32},
{156, 41},
{156, 49},
{156, 57},
{156, 65},
{156, 74},
{156, 82},
{156, 90},
{156, 98},
{156, 106},
{156, 115},
{156, 123},
{156, 131},
{156, 139},
{156, 148},
{156, 156},
{156, 164},
{156, 172},
{156, 180},
{156, 189},
{156, 197},
{156, 205},
{156, 213},
{156, 222},
{156, 230},
{156, 238},
{156, 246},
{156, 255}
],
[
{148, 0},
{148, 8},
{148, 16},
{148, 24},
{148, 32},
{148, 41},
{148, 49},
{148, 57},
{148, 65},
{148, 74},
{148, 82},
{148, 90},
{148, 98},
{148, 106},
{148, 115},
{148, 123},
{148, 131},
{148, 139},
{148, 148},
{148, 156},
{148, 164},
{148, 172},
{148, 180},
{148, 189},
{148, 197},
{148, 205},
{148, 213},
{148, 222},
{148, 230},
{148, 238},
{148, 246},
{148, 255}
],
[
{139, 0},
{139, 8},
{139, 16},
{139, 24},
{139, 32},
{139, 41},
{139, 49},
{139, 57},
{139, 65},
{139, 74},
{139, 82},
{139, 90},
{139, 98},
{139, 106},
{139, 115},
{139, 123},
{139, 131},
{139, 139},
{139, 148},
{139, 156},
{139, 164},
{139, 172},
{139, 180},
{139, 189},
{139, 197},
{139, 205},
{139, 213},
{139, 222},
{139, 230},
{139, 238},
{139, 246},
{139, 255}
],
[
{131, 0},
{131, 8},
{131, 16},
{131, 24},
{131, 32},
{131, 41},
{131, 49},
{131, 57},
{131, 65},
{131, 74},
{131, 82},
{131, 90},
{131, 98},
{131, 106},
{131, 115},
{131, 123},
{131, 131},
{131, 139},
{131, 148},
{131, 156},
{131, 164},
{131, 172},
{131, 180},
{131, 189},
{131, 197},
{131, 205},
{131, 213},
{131, 222},
{131, 230},
{131, 238},
{131, 246},
{131, 255}
],
[
{123, 0},
{123, 8},
{123, 16},
{123, 24},
{123, 32},
{123, 41},
{123, 49},
{123, 57},
{123, 65},
{123, 74},
{123, 82},
{123, 90},
{123, 98},
{123, 106},
{123, 115},
{123, 123},
{123, 131},
{123, 139},
{123, 148},
{123, 156},
{123, 164},
{123, 172},
{123, 180},
{123, 189},
{123, 197},
{123, 205},
{123, 213},
{123, 222},
{123, 230},
{123, 238},
{123, 246},
{123, 255}
],
[
{115, 0},
{115, 8},
{115, 16},
{115, 24},
{115, 32},
{115, 41},
{115, 49},
{115, 57},
{115, 65},
{115, 74},
{115, 82},
{115, 90},
{115, 98},
{115, 106},
{115, 115},
{115, 123},
{115, 131},
{115, 139},
{115, 148},
{115, 156},
{115, 164},
{115, 172},
{115, 180},
{115, 189},
{115, 197},
{115, 205},
{115, 213},
{115, 222},
{115, 230},
{115, 238},
{115, 246},
{115, 255}
],
[
{106, 0},
{106, 8},
{106, 16},
{106, 24},
{106, 32},
{106, 41},
{106, 49},
{106, 57},
{106, 65},
{106, 74},
{106, 82},
{106, 90},
{106, 98},
{106, 106},
{106, 115},
{106, 123},
{106, 131},
{106, 139},
{106, 148},
{106, 156},
{106, 164},
{106, 172},
{106, 180},
{106, 189},
{106, 197},
{106, 205},
{106, 213},
{106, 222},
{106, 230},
{106, 238},
{106, 246},
{106, 255}
],
[
{98, 0},
{98, 8},
{98, 16},
{98, 24},
{98, 32},
{98, 41},
{98, 49},
{98, 57},
{98, 65},
{98, 74},
{98, 82},
{98, 90},
{98, 98},
{98, 106},
{98, 115},
{98, 123},
{98, 131},
{98, 139},
{98, 148},
{98, 156},
{98, 164},
{98, 172},
{98, 180},
{98, 189},
{98, 197},
{98, 205},
{98, 213},
{98, 222},
{98, 230},
{98, 238},
{98, 246},
{98, 255}
],
[
{90, 0},
{90, 8},
{90, 16},
{90, 24},
{90, 32},
{90, 41},
{90, 49},
{90, 57},
{90, 65},
{90, 74},
{90, 82},
{90, 90},
{90, 98},
{90, 106},
{90, 115},
{90, 123},
{90, 131},
{90, 139},
{90, 148},
{90, 156},
{90, 164},
{90, 172},
{90, 180},
{90, 189},
{90, 197},
{90, 205},
{90, 213},
{90, 222},
{90, 230},
{90, 238},
{90, 246},
{90, 255}
],
[
{82, 0},
{82, 8},
{82, 16},
{82, 24},
{82, 32},
{82, 41},
{82, 49},
{82, 57},
{82, 65},
{82, 74},
{82, 82},
{82, 90},
{82, 98},
{82, 106},
{82, 115},
{82, 123},
{82, 131},
{82, 139},
{82, 148},
{82, 156},
{82, 164},
{82, 172},
{82, 180},
{82, 189},
{82, 197},
{82, 205},
{82, 213},
{82, 222},
{82, 230},
{82, 238},
{82, 246},
{82, 255}
],
[
{74, 0},
{74, 8},
{74, 16},
{74, 24},
{74, 32},
{74, 41},
{74, 49},
{74, 57},
{74, 65},
{74, 74},
{74, 82},
{74, 90},
{74, 98},
{74, 106},
{74, 115},
{74, 123},
{74, 131},
{74, 139},
{74, 148},
{74, 156},
{74, 164},
{74, 172},
{74, 180},
{74, 189},
{74, 197},
{74, 205},
{74, 213},
{74, 222},
{74, 230},
{74, 238},
{74, 246},
{74, 255}
],
[
{65, 0},
{65, 8},
{65, 16},
{65, 24},
{65, 32},
{65, 41},
{65, 49},
{65, 57},
{65, 65},
{65, 74},
{65, 82},
{65, 90},
{65, 98},
{65, 106},
{65, 115},
{65, 123},
{65, 131},
{65, 139},
{65, 148},
{65, 156},
{65, 164},
{65, 172},
{65, 180},
{65, 189},
{65, 197},
{65, 205},
{65, 213},
{65, 222},
{65, 230},
{65, 238},
{65, 246},
{65, 255}
],
[
{57, 0},
{57, 8},
{57, 16},
{57, 24},
{57, 32},
{57, 41},
{57, 49},
{57, 57},
{57, 65},
{57, 74},
{57, 82},
{57, 90},
{57, 98},
{57, 106},
{57, 115},
{57, 123},
{57, 131},
{57, 139},
{57, 148},
{57, 156},
{57, 164},
{57, 172},
{57, 180},
{57, 189},
{57, 197},
{57, 205},
{57, 213},
{57, 222},
{57, 230},
{57, 238},
{57, 246},
{57, 255}
],
[
{49, 0},
{49, 8},
{49, 16},
{49, 24},
{49, 32},
{49, 41},
{49, 49},
{49, 57},
{49, 65},
{49, 74},
{49, 82},
{49, 90},
{49, 98},
{49, 106},
{49, 115},
{49, 123},
{49, 131},
{49, 139},
{49, 148},
{49, 156},
{49, 164},
{49, 172},
{49, 180},
{49, 189},
{49, 197},
{49, 205},
{49, 213},
{49, 222},
{49, 230},
{49, 238},
{49, 246},
{49, 255}
],
[
{41, 0},
{41, 8},
{41, 16},
{41, 24},
{41, 32},
{41, 41},
{41, 49},
{41, 57},
{41, 65},
{41, 74},
{41, 82},
{41, 90},
{41, 98},
{41, 106},
{41, 115},
{41, 123},
{41, 131},
{41, 139},
{41, 148},
{41, 156},
{41, 164},
{41, 172},
{41, 180},
{41, 189},
{41, 197},
{41, 205},
{41, 213},
{41, 222},
{41, 230},
{41, 238},
{41, 246},
{41, 255}
],
[
{32, 0},
{32, 8},
{32, 16},
{32, 24},
{32, 32},
{32, 41},
{32, 49},
{32, 57},
{32, 65},
{32, 74},
{32, 82},
{32, 90},
{32, 98},
{32, 106},
{32, 115},
{32, 123},
{32, 131},
{32, 139},
{32, 148},
{32, 156},
{32, 164},
{32, 172},
{32, 180},
{32, 189},
{32, 197},
{32, 205},
{32, 213},
{32, 222},
{32, 230},
{32, 238},
{32, 246},
{32, 255}
],
[
{24, 0},
{24, 8},
{24, 16},
{24, 24},
{24, 32},
{24, 41},
{24, 49},
{24, 57},
{24, 65},
{24, 74},
{24, 82},
{24, 90},
{24, 98},
{24, 106},
{24, 115},
{24, 123},
{24, 131},
{24, 139},
{24, 148},
{24, 156},
{24, 164},
{24, 172},
{24, 180},
{24, 189},
{24, 197},
{24, 205},
{24, 213},
{24, 222},
{24, 230},
{24, 238},
{24, 246},
{24, 255}
],
[
{16, 0},
{16, 8},
{16, 16},
{16, 24},
{16, 32},
{16, 41},
{16, 49},
{16, 57},
{16, 65},
{16, 74},
{16, 82},
{16, 90},
{16, 98},
{16, 106},
{16, 115},
{16, 123},
{16, 131},
{16, 139},
{16, 148},
{16, 156},
{16, 164},
{16, 172},
{16, 180},
{16, 189},
{16, 197},
{16, 205},
{16, 213},
{16, 222},
{16, 230},
{16, 238},
{16, 246},
{16, 255}
],
[
{8, 0},
{8, 8},
{8, 16},
{8, 24},
{8, 32},
{8, 41},
{8, 49},
{8, 57},
{8, 65},
{8, 74},
{8, 82},
{8, 90},
{8, 98},
{8, 106},
{8, 115},
{8, 123},
{8, 131},
{8, 139},
{8, 148},
{8, 156},
{8, 164},
{8, 172},
{8, 180},
{8, 189},
{8, 197},
{8, 205},
{8, 213},
{8, 222},
{8, 230},
{8, 238},
{8, 246},
{8, 255}
],
[
{0, 0},
{0, 8},
{0, 16},
{0, 24},
{0, 32},
{0, 41},
{0, 49},
{0, 57},
{0, 65},
{0, 74},
{0, 82},
{0, 90},
{0, 98},
{0, 106},
{0, 115},
{0, 123},
{0, 131},
{0, 139},
{0, 148},
{0, 156},
{0, 164},
{0, 172},
{0, 180},
{0, 189},
{0, 197},
{0, 205},
{0, 213},
{0, 222},
{0, 230},
{0, 238},
{0, 246},
{0, 255}
]
]
# Loads an 8-bit grayscale+alpha PNG fixture and checks both the parsed
# header metadata and every decoded pixel against @actual_pixels.
test "8 bit grayscale with alpha channel" do
{:ok, image} = Imagineer.load(@test_path <> "bgai4a08.png")
assert image.height == 32
assert image.width == 32
assert image.color_format == :grayscale_alpha
assert image.compression == :zlib
# Color type 4 = grayscale with alpha; interlace method 1 = Adam7.
assert image.color_type == 4
assert image.interlace_method == 1
assert image.gamma == 1.0
assert image.bit_depth == 8
assert image.mask == nil
assert image.format == :png
assert image.mime_type == "image/png"
# Grayscale images carry no palette.
assert image.palette == []
assert_pixels_match(image.pixels, @actual_pixels)
end
# Compares decoded pixel rows against the expected rows one by one.
# Fails with a descriptive message (1-based row number, expected vs. parsed
# values) on the first mismatching row. Returns :ok when all rows match.
defp assert_pixels_match(parsed_pixels, actual_pixels) do
  Enum.zip(parsed_pixels, actual_pixels)
  # with_index(1) yields 1-based row numbers directly, replacing the
  # previous manual `index + 1` arithmetic in the message.
  |> Enum.with_index(1)
  |> Enum.each(fn {{parsed, actual}, row_number} ->
    # Fix: replace the unprofessional failure message with a clear one.
    assert(actual == parsed, """
    pixel row #{row_number} does not match the expected values
    \texpected: #{inspect(actual)}
    \tgot: #{inspect(parsed)}
    """)
  end)
end
end
| 16.80885 | 69 | 0.312099 |
ff48c82497cc2d658888acd973b323f45dc0f568 | 214 | ex | Elixir | lib/yacto/db/single.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 56 | 2017-11-30T02:07:07.000Z | 2022-02-16T17:38:42.000Z | lib/yacto/db/single.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 22 | 2018-01-04T00:34:51.000Z | 2021-08-01T06:52:10.000Z | lib/yacto/db/single.ex | aman-io/yacto | 4c26772343a9029923ad7e25245f17f2be22a1a1 | [
"Apache-2.0"
] | 13 | 2018-08-08T05:32:42.000Z | 2021-07-30T14:57:35.000Z | defmodule Yacto.DB.Single do
@behaviour Yacto.DB
@impl Yacto.DB
def repos(_dbname, config, _opts) do
[config.repo]
end
@impl Yacto.DB
def repo(_dbname, config, _opts) do
config.repo
end
end
| 15.285714 | 38 | 0.682243 |
ff48d92a60f9fb9e4dc0b6685b4a72b5fb9b54ca | 1,535 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1beta1.Model.Empty do
@moduledoc """
A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`.
## Attributes
"""
# Provides the struct definition plus decode/2 used by the Poison defimpls.
use GoogleApi.Gax.ModelBase
# No fields: `Empty` always serializes to the empty JSON object `{}`.
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.PrivateCA.V1beta1.Model.Empty do
  # Delegate JSON decoding to the generated model's decode/2.
  def decode(value, options),
    do: GoogleApi.PrivateCA.V1beta1.Model.Empty.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.PrivateCA.V1beta1.Model.Empty do
  # Delegate JSON encoding to the shared Gax model base.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 36.547619 | 345 | 0.758958 |
ff48f7a044d169c43501cb7c004791ecc4096e72 | 450 | ex | Elixir | apps/peedy_f/test/support/case.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | 34 | 2017-05-07T08:50:59.000Z | 2021-11-25T00:27:11.000Z | apps/peedy_f/test/support/case.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | null | null | null | apps/peedy_f/test/support/case.ex | poteto/peedy | df9d5ee7fcbceb30b5939b36224a257249a180ea | [
"Apache-2.0"
] | 7 | 2017-05-10T12:42:30.000Z | 2021-11-03T01:21:02.000Z | defmodule PeedyF.Case do
use ExUnit.CaseTemplate
using do
quote do
alias PeedyF.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
setup tags do
opts = tags |> Map.take([:isolation]) |> Enum.to_list()
:ok = Ecto.Adapters.SQL.Sandbox.checkout(PeedyF.Repo, opts)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(PeedyF.Repo, {:shared, self()})
end
:ok
end
end
| 18 | 68 | 0.637778 |
ff48f9d32aeec9143c92ccc227f0e05b591148e8 | 501 | exs | Elixir | integration_test/temple_demo/lib/temple_demo_web/templates/post/form.html.exs | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | integration_test/temple_demo/lib/temple_demo_web/templates/post/form.html.exs | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | integration_test/temple_demo/lib/temple_demo_web/templates/post/form.html.exs | rktjmp/temple | 6fe46cbb4998477d76147fa95c9fd9c7841545ef | [
"MIT"
] | null | null | null | form_for @changeset, @action, fn f ->
# Show a banner when the changeset carries a failed action (validation error).
# NOTE(review): the Flash type is :info even though this is an error notice —
# confirm whether :error was intended for the Flash component.
if @changeset.action do
c Flash, type: :info do
p do: "Oops, something went wrong! Please check the errors below."
end
end
# Post fields — title, body, published_at, author — each with inline errors.
label f, :title
text_input f, :title
error_tag(f, :title)
label f, :body
textarea f, :body
error_tag(f, :body)
label f, :published_at
datetime_select f, :published_at
error_tag(f, :published_at)
label f, :author
text_input f, :author
error_tag(f, :author)
div do
submit "Save"
end
end
| 17.892857 | 72 | 0.658683 |
ff49055ca35d21873deaa5a5a83928eef85bef25 | 3,370 | ex | Elixir | lib/ethereum/rpc/personal.ex | alanwilhelm/elixir-ethereum | 42aba002e485524be2981ae05b6f7134f8439dc4 | [
"MIT"
] | null | null | null | lib/ethereum/rpc/personal.ex | alanwilhelm/elixir-ethereum | 42aba002e485524be2981ae05b6f7134f8439dc4 | [
"MIT"
] | null | null | null | lib/ethereum/rpc/personal.ex | alanwilhelm/elixir-ethereum | 42aba002e485524be2981ae05b6f7134f8439dc4 | [
"MIT"
] | null | null | null | defmodule Ethereum.Personal do
@moduledoc """
Personal namespace for Ethereum JSON-RPC
This could be considered dangerous as it requires the admin api to be exposed over JSON-RPC.
Use only in a safe environment and see README to enable this namespace in Geth.
"""
alias Ethereum.Transport
alias Ethereum.Conversion
require Logger
require IEx
@doc """
Create new account to be managed by connected Ethereum node with password/password confirmation
## Example:
iex> Ethereum.new_account("p@55w0rd","p@55w0rd")
"""
@spec new_account(password :: String.t, password_confirmation :: String.t) :: {:ok, String.t} | {:error, String.t}
def new_account(password, password_confirmation) do
case Transport.send("personal_newAccount",[password]) do
{:ok, account_hash} ->
{:ok, account_hash}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Unlock account account on conected Ethereum node
## Example:
iex> Ethereum.unlock_account("0xe55c5bb9d42307e03fb4aa39ccb878c16f6f901e", "h4ck3r")
{:ok, true}
"""
@spec unlock_account(account :: String.t, password :: String.t) :: {:ok, boolean} | {:error, String.t}
def unlock_account(account, password) do
case Transport.send("personal_unlockAccount", [account, password]) do
{:ok, result} ->
{:ok, result}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Lock account account on conected Ethereum node
## Example:
iex> Ethereum.lock_account("0xe55c5bb9d42307e03fb4aa39ccb878c16f6f901e")
{:ok, true}
"""
@spec lock_account(account :: String.t) :: {:ok, boolean} | {:error, String.t}
def lock_account(account) do
case Transport.send("personal_lockAccount", [account]) do
{:ok, result} ->
{:ok, result}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Send a transaction usinsg an unlocked account
## Example:
iex> sender = "0x3f156afdb248618892cb5089ba5a5fcac8ee0b01"
...> receiver = "0x0f31986d7a0d4f160acd97583e3c3b591dcb5dde"
...> amount = 0.5
...> password = ""
...> Enum.each(1..1000, fn x -> EsprezzoEthereum.send_transaction(sender, receiver, amount, password) end)
...> - OR -
...> Ethereum.send_transaction(sender, receiver, amount, password)
{:ok, "88c646f79ecb2b596f6e51f7d5db2abd67c79ff1f554e9c6cd2915f486f34dcb"}
"""
@spec send_transaction(from :: String.t, to :: String, value :: float, password :: String.t) :: {:ok, boolean} | {:error, String.t}
def send_transaction(from, to, value, password) do
wei_value = Conversion.to_wei(value, :ether)
hex_wei_value = "0x" <> Hexate.encode(wei_value)
Logger.warn "wei value to send: #{wei_value}"
params = [%{
"from": from,
"to": to,
"gas": "0x186a0", # 100k,
"gasPrice": "0x9184e72a000", # 10000000000000
#"gasPrice": "0x48c27395000", # 5000000000000
#"gasPrice": "0x1840d131aab", # 1666666666666.6667
"value": hex_wei_value
},
password
]
case Transport.send("personal_sendTransaction", params) do
{:ok, result} ->
Logger.warn "SendTransaction result: #{inspect result}"
{:ok, result}
{:error, reason} ->
{:error, reason}
end
end
end
| 30.089286 | 133 | 0.637982 |
ff491d331b370660f4f331be5030ad2217183938 | 2,863 | exs | Elixir | test/query_test.exs | gnalck/elixir-sqlite3 | 0ab43c07d226de8f5b49d38ca15e2a3f05fb48ba | [
"MIT"
] | 1 | 2020-05-19T08:32:02.000Z | 2020-05-19T08:32:02.000Z | test/query_test.exs | gnalck/elixir-sqlite3 | 0ab43c07d226de8f5b49d38ca15e2a3f05fb48ba | [
"MIT"
] | null | null | null | test/query_test.exs | gnalck/elixir-sqlite3 | 0ab43c07d226de8f5b49d38ca15e2a3f05fb48ba | [
"MIT"
] | null | null | null | defmodule XQLite3Test do
use ExUnit.Case, async: true
import XQLite3.TestHelper
# Every test runs against a fresh in-memory SQLite database.
setup do
{:ok, conn} = XQLite3.start_link(":memory:")
{:ok, [conn: conn]}
end
# NOTE(review): `query/2` comes from XQLite3.TestHelper and appears to rely
# on the `context` binding for the connection — confirm in the helper.
# sqlite has dynamic typing, and so if there is no backing type
# we pretty much use the 'raw' return value - as we cannot
# intelligently convert them.
test "decode basic types without backing table", context do
assert [[nil]] = query("SELECT NULL", [])
assert [[1]] = query("SELECT 1", [])
assert [["1"]] = query("SELECT '1'", [])
# Without a typed column, booleans come back as raw 1/0.
assert [[1, 0]] = query("SELECT true, false", [])
assert [["e"]] = query("SELECT 'e'", [])
assert [["ẽ"]] = query("SELECT 'ẽ'", [])
assert [[42]] = query("SELECT 42", [])
assert [[42.0]] = query("SELECT 42.0", [])
# Blob literals decode to raw binaries.
assert [[<<16, 0>>]] = query("SELECT x'1000'", [])
# cause seg fault - likely issue with esqlite
# assert [["Inf"]] = query("SELECT 9e999", [])
# assert [["-Inf"]] = query("SELECT -9e999", [])
end
# Column names are reported lower-cased.
test "column names", context do
assert {:ok, res} = XQLite3.query(context[:conn], "select 1 as A, 2 as B", [])
assert %XQLite3.Result{} = res
assert ["a", "b"] == res.columns
end
# With a BOOL/BOOLEAN column type the raw 1/0 is converted back to booleans.
test "encode and decode bool / boolean", context do
assert [] = query("CREATE TABLE foo(a BOOL, b BOOLEAN)", [])
assert [] = query("INSERT INTO foo VALUES($1, $2)", [true, false])
assert [[true, false]] == query("SELECT * FROM foo", [])
end
test "encode and decode time", context do
time = ~T[23:51:02.491415]
assert [] = query("CREATE TABLE foo(a TIME)", [])
assert [] = query("INSERT INTO foo VALUES($1)", [time])
assert [[time]] == query("SELECT * FROM foo", [])
end
test "encode and decode date", context do
date = ~D[2020-05-11]
assert [] = query("CREATE TABLE foo(a DATE)", [])
assert [] = query("INSERT INTO foo VALUES($1)", [date])
assert [[date]] == query("SELECT * FROM foo", [])
end
test "encode and decode datetime", context do
datetime = ~U[2020-05-11 00:28:33.696598Z]
assert [] = query("CREATE TABLE foo(a DATETIME)", [])
assert [] = query("INSERT INTO foo VALUES($1)", [datetime])
assert [[datetime]] == query("SELECT * FROM foo", [])
end
test "encode and decode blob", context do
blob = <<16, 0>>
assert [] = query("CREATE TABLE foo(a BLOB)", [])
assert [] = query("INSERT INTO foo VALUES($1)", [blob])
assert [[blob]] = query("SELECT * from foo", [])
end
# Round-trips a text value through an untyped column.
test "insert", context do
assert [] == query("CREATE TABLE foo(bar);", [])
assert [] == query("SELECT * from foo", [])
assert [] == query("INSERT INTO foo VALUES ($1)", ["wow"])
assert [["wow"]] == query("SELECT * FROM foo", [])
end
test "decode basic types with backing table", context do
assert [] = query("CREATE TABLE TEST(int INT, text TEXT, real REAL, blob BLOB);", [])
end
end
| 35.7875 | 89 | 0.583304 |
ff4927f90799b92261f9e49cff1038142adacea3 | 1,303 | ex | Elixir | apps/domain/lib/domain/queue.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 3 | 2020-11-24T07:45:26.000Z | 2021-07-29T13:37:02.000Z | apps/domain/lib/domain/queue.ex | mskcc/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 52 | 2020-10-21T19:47:59.000Z | 2021-09-09T18:42:33.000Z | apps/domain/lib/domain/queue.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 1 | 2020-12-15T03:33:31.000Z | 2020-12-15T03:33:31.000Z | defmodule Domain.Queue do
@moduledoc """
The Queue context: read and manage Oban jobs.
"""
alias Domain.Repo
import Ecto.Query, warn: false
alias Oban.Job
@doc """
Returns a paginated page of Oban jobs plus the total count matching `filters`.

Expects a map with `:page` (1-based), `:per_page`, `:sort_by`
(field => direction) and `:filters` (field => value).

## Examples

    iex> list_jobs(%{page: 1, per_page: 25, sort_by: %{id: :desc}, filters: %{}})
    %{entries: [%Job{}, ...], count: 42}

"""
def list_jobs(%{page: page, per_page: per_page, sort_by: sort_by, filters: filters}) do
# Flip field => direction into the [direction: field] shape order_by expects.
sort_by = Keyword.new(sort_by, fn {key, val} -> {val, key} end)
# Convert the filters map into a keyword-style list usable by where/2.
filters = Enum.map(filters, fn {key, value} -> {key, value} end)
count =
Job
|> select([s], count(s.id))
|> where(^filters)
|> Repo.one()
query =
Job
|> offset(^((page - 1) * per_page))
|> limit(^per_page)
|> where(^filters)
|> order_by(^sort_by)
entries = Repo.all(query)
%{entries: entries, count: count}
end
@doc """
Returns the most recently completed job on `queue`, or `nil` if none exists.
"""
def get_latest_completed_job_by_queue(queue) do
Repo.one(
from(j in Job,
where: not is_nil(j.completed_at) and j.queue == ^queue,
order_by: [desc: :completed_at],
limit: 1
)
)
end
@doc """
Retries the Oban job with the given id.
"""
def retry_job(id) do
Oban.retry_job(id)
end
@doc """
Lists the distinct job states present in the jobs table.
"""
def list_states do
Job
|> select([j], j.state)
|> group_by(:state)
|> Repo.all()
end
@doc """
Lists the distinct queue names present in the jobs table.
"""
def list_queues do
Job
|> select([j], j.queue)
|> group_by(:queue)
|> Repo.all()
end
end
| 19.161765 | 89 | 0.566385 |
ff4938f8ac0e49b93a56e8ac957013b2392979a3 | 6,546 | ex | Elixir | lib/mix/tasks/nerves_hub.firmware.ex | brianberlin/nerves_hub_cli | b668ffbb525aed3f3bf18b54ead1f0a969b90485 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/nerves_hub.firmware.ex | brianberlin/nerves_hub_cli | b668ffbb525aed3f3bf18b54ead1f0a969b90485 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/nerves_hub.firmware.ex | brianberlin/nerves_hub_cli | b668ffbb525aed3f3bf18b54ead1f0a969b90485 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.NervesHub.Firmware do
use Mix.Task
@shortdoc "Manages firmware on NervesHub"
@moduledoc """
Manage Firmware on NervesHub
## publish
Upload signed firmware to NervesHub. Supplying a path to the firmware file
is optional. If it is not specified, NervesHub will locate the firmware
based off the project settings.
mix nerves_hub.firmware publish [Optional: /path/to/app.firmware]
### Command-line options
* `--product` - (Optional) The product name to publish the firmware to.
This defaults to the Mix Project config `:app` name.
* `--deploy` - (Optional) The name of a deployment to update following
firmware publish. This key can be passed multiple times to update
multiple deployments.
* `--key` - (Optional) The firmware signing key to sign the firmware with.
* `--ttl` - (Optional) The firmware max time to live seconds.
## list
mix nerves_hub.firmware list
### Command-line options
* `--product` - (Optional) The product name to publish the firmware to.
This defaults to the Mix Project config `:app` name.
## delete
Firmware can only be deleted if it is not associated to any deployment.
Call `list` to retrieve firmware UUIDs
mix nerves_hub.firmware delete [firmware_uuid]
## sign
Sign the local firmware. Supplying a path to the firmware file
is optional. If it is not specified, NervesHub will locate the firmware
based off the project settings.
mix nerves_hub.firmware sign [Optional: /path/to/app.firmware]
### Command-line options
* `--key` - (Optional) The firmware signing key to sign the firmware with.
"""
import Mix.NervesHubCLI.Utils
alias NervesHubCLI.Cmd
alias Mix.NervesHubCLI.Shell
# Recognized CLI switches; :keep lets --deploy be passed multiple times.
@switches [
org: :string,
product: :string,
deploy: :keep,
key: :string,
ttl: :integer
]
# Mix task entry point: parses the CLI switches, resolves org/product,
# then dispatches on the remaining positional arguments.
def run(args) do
_ = Application.ensure_all_started(:nerves_hub_cli)
{opts, args} = OptionParser.parse!(args, strict: @switches)
show_api_endpoint()
org = org(opts)
product = product(opts)
case args do
["list"] ->
list(org, product)
# "publish" with no path: locate the firmware from the project settings.
["publish" | []] ->
firmware()
|> publish_confirm(org, opts)
["publish", firmware] when is_binary(firmware) ->
firmware
|> Path.expand()
|> publish_confirm(org, opts)
["delete", uuid] when is_binary(uuid) ->
delete_confirm(uuid, org, product)
["sign"] ->
firmware()
|> sign(org, opts)
["sign", firmware] ->
sign(firmware, org, opts)
# Anything else prints usage and raises.
_ ->
render_help()
end
end
# Prints usage and raises via Shell.raise/1; never returns.
@spec render_help() :: no_return()
def render_help() do
Shell.raise("""
Invalid arguments
Usage:
mix nerves_hub.firmware list
mix nerves_hub.firmware publish
mix nerves_hub.firmware delete
mix nerves_hub.firmware sign
Run `mix help nerves_hub.firmware` for more information.
""")
end
# Prints every published firmware for `product` in `org`, or a notice when
# none exists. API errors are rendered via Shell.render_error/1.
def list(org, product) do
auth = Shell.request_auth()
case NervesHubUserAPI.Firmware.list(org, product, auth) do
{:ok, %{"data" => []}} ->
Shell.info("No firmware has been published for product: #{product}")
{:ok, %{"data" => firmwares}} ->
Shell.info("")
Shell.info("Firmwares:")
Enum.each(firmwares, fn metadata ->
Shell.info("------------")
render_firmware(metadata)
|> String.trim_trailing()
|> Shell.info()
end)
Shell.info("")
error ->
Shell.render_error(error)
end
end
# Shows the firmware metadata and asks for confirmation before publishing.
# The product name is taken from the firmware's own metadata.
defp publish_confirm(firmware, org, opts) do
with true <- File.exists?(firmware),
{:ok, metadata} <- metadata(firmware) do
Shell.info("------------")
Shell.info("Organization: #{org}")
render_firmware(metadata)
|> String.trim_trailing()
|> Shell.info()
if Shell.yes?("Publish Firmware?") do
product = metadata["product"]
publish(firmware, org, product, opts)
end
else
# File.exists?/1 returned false: nothing at the given path.
false ->
Shell.info("Cannot find firmware at #{firmware}")
# metadata/1 (from Mix.NervesHubCLI.Utils) could not parse the file.
{:error, reason} ->
Shell.info("Unable to parse firmware metadata: #{inspect(reason)}")
end
end
defp delete_confirm(uuid, org, product) do
Shell.info("UUID: #{uuid}")
if Shell.yes?("Delete Firmware?") do
delete(uuid, org, product)
end
end
# Optionally signs the firmware (when --key was given), uploads it, and then
# updates any deployments named via --deploy.
defp publish(firmware, org, product, opts) do
if opts[:key] do
sign(firmware, org, opts)
end
auth = Shell.request_auth()
# Optional max time-to-live in seconds for the uploaded firmware.
ttl = opts[:ttl]
case NervesHubUserAPI.Firmware.create(org, product, firmware, ttl, auth) do
{:ok, %{"data" => %{} = firmware}} ->
Shell.info("\nFirmware published successfully")
Keyword.get_values(opts, :deploy)
|> maybe_deploy(firmware, org, product, auth)
error ->
Shell.render_error(error)
end
end
defp delete(uuid, org, product) do
auth = Shell.request_auth()
case NervesHubUserAPI.Firmware.delete(org, product, uuid, auth) do
{:ok, ""} ->
Shell.info("Firmware deleted successfully")
error ->
Shell.render_error(error)
end
end
# Signs `firmware` in place with the org signing key named by --key.
# Fetches the key pair via Shell.request_keys/2 and shells out to fwup,
# writing the signed image over the input file.
def sign(firmware, org, opts) do
key = opts[:key] || Shell.raise("Must specify key with --key")
Shell.info("Signing #{firmware}")
Shell.info("With key #{key}")
with {:ok, public_key, private_key} <- Shell.request_keys(org, key),
:ok <-
Cmd.fwup(
[
"--sign",
"-i",
firmware,
"-o",
firmware,
"--private-key",
private_key,
"--public-key",
public_key
],
File.cwd!()
) do
Shell.info("Finished signing")
else
error -> Shell.render_error(error)
end
end
# No deployments requested: nothing to do.
defp maybe_deploy([], _, _, _, _), do: :ok
# Points each named deployment at the newly published firmware's UUID.
defp maybe_deploy(deployments, firmware, org, product, auth) do
Enum.each(deployments, fn deployment_name ->
Shell.info("Deploying firmware to #{deployment_name}")
Mix.Tasks.NervesHub.Deployment.update(
deployment_name,
"firmware",
firmware["uuid"],
org,
product,
auth
)
end)
end
# Formats a firmware metadata map (string keys as returned by the API /
# fwup metadata) into a human-readable multi-line string.
defp render_firmware(params) do
"""
product: #{params["product"]}
version: #{params["version"]}
platform: #{params["platform"]}
architecture: #{params["architecture"]}
uuid: #{params["uuid"]}
"""
end
end
| 24.701887 | 79 | 0.595631 |
ff495188da528825f30a9697cf35548c334438ed | 789 | exs | Elixir | test/unit/hologram/compiler/module_def_aggregators/function_definition_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 40 | 2022-01-19T20:27:36.000Z | 2022-03-31T18:17:41.000Z | test/unit/hologram/compiler/module_def_aggregators/function_definition_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 42 | 2022-02-03T22:52:43.000Z | 2022-03-26T20:57:32.000Z | test/unit/hologram/compiler/module_def_aggregators/function_definition_test.exs | gregjohnsonsaltaire/hologram | aa8e9ea0d599def864c263cc37cc8ee31f02ac4a | [
"MIT"
] | 3 | 2022-02-10T04:00:37.000Z | 2022-03-08T22:07:45.000Z | defmodule Hologram.Compiler.ModuleDefAggregator.FunctionDefinitionTest do
use Hologram.Test.UnitCase, async: false
alias Hologram.Compiler.{ModuleDefAggregator, ModuleDefStore}
alias Hologram.Compiler.IR.{Block, FunctionDefinition, ModuleDefinition, ModuleType}
alias Hologram.Test.Fixtures.{PlaceholderModule1, PlaceholderModule2}
setup do
ModuleDefStore.run()
:ok
end
test "aggregate/1" do
ir = %FunctionDefinition{
body: %Block{expressions: [
%ModuleType{module: PlaceholderModule1},
%ModuleType{module: PlaceholderModule2}
]}
}
ModuleDefAggregator.aggregate(ir)
assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule1)
assert %ModuleDefinition{} = ModuleDefStore.get!(PlaceholderModule2)
end
end
| 29.222222 | 86 | 0.749049 |
ff4953336c87fa289555edfd3eb2ebb558ddc0c2 | 2,077 | ex | Elixir | apps/ema/test/support/service_case.ex | hoyon/skye | 50a25a55bb0c38460a0bd204c8d0ce716da9f017 | [
"MIT"
] | null | null | null | apps/ema/test/support/service_case.ex | hoyon/skye | 50a25a55bb0c38460a0bd204c8d0ce716da9f017 | [
"MIT"
] | null | null | null | apps/ema/test/support/service_case.ex | hoyon/skye | 50a25a55bb0c38460a0bd204c8d0ce716da9f017 | [
"MIT"
] | null | null | null | defmodule Ema.ServiceCase do
use ExUnit.CaseTemplate
alias Ema.{Service, Type}
using opts do
quote bind_quoted: [opts: opts] do
import Ema.ServiceCase
import Mox
@service Keyword.fetch!(opts, :service)
test_service_sanity(@service)
end
end
@doc "Ensure action returns correct type for a given input"
defmacro test_action(action, input, output) do
quote do
test "action #{@service}.#{unquote(action)} with input #{inspect(unquote(input))}" do
service = @service
action = unquote(action)
input = unquote(input)
output = unquote(output)
output_typename = Service.actions(service)[action].output
{:ok, result} = Service.run(service, action, input)
assert Type.check_type(result, service, output_typename)
assert result == output
end
end
end
@doc "Ensure trigger returns correct type for a given input"
defmacro test_trigger(trigger, input, output) do
quote do
test "trigger #{@service}.#{unquote(trigger)} with input #{inspect(unquote(input))}" do
service = @service
trigger = unquote(trigger)
input = unquote(input)
output = unquote(output)
output_typename = Service.triggers(service)[trigger].output
assert {:ok, result} = Service.run_trigger(service, trigger, input)
assert Type.check_type(result, service, output_typename)
assert result == output
end
end
end
@doc """
Ensure that the service has all required fields
Required fields:
- name
- description
"""
defmacro test_service_sanity(service) do
quote do
@required_functions [
:__ema_service,
:__ema_name,
:__ema_description
]
test "#{unquote(service)} sanity test" do
# Required functions
functions = apply(unquote(service), :__info__, [:functions])
assert @required_functions
|> Enum.map(fn f -> Keyword.has_key?(functions, f) end)
|> Enum.all?(& &1)
end
end
end
end
| 28.067568 | 93 | 0.636976 |
ff495c78a0e53d79c4dffb1f35e07e20aa10b027 | 1,227 | ex | Elixir | lib/doc_gen_web/controllers/embed_controller.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | lib/doc_gen_web/controllers/embed_controller.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | 27 | 2018-10-29T18:34:44.000Z | 2019-03-11T18:43:12.000Z | lib/doc_gen_web/controllers/embed_controller.ex | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | defmodule DocGenWeb.EmbedController do
use DocGenWeb, :controller
alias DocGen.Content
alias Content.Embed
def index(conn, _params) do
embeds = Content.list_embeds()
render(conn, "index.html", embeds: embeds)
end
def all(conn, _params) do
embeds = Content.list_embeds()
render(conn, "all.html", embeds: embeds)
end
def new(conn, _params) do
changeset = Content.change_embed(%Embed{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"embed" => embed_params}) do
case Content.create_embed(embed_params) do
{:ok, embed} ->
conn
|> put_flash(:info, "Embed created successfully.")
|> redirect(to: Routes.embed_path(conn, :show, embed))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
embed = Content.get_embed!(id)
render(conn, "show.html", embed: embed)
end
def delete(conn, %{"id" => id}) do
embed = Content.get_embed!(id)
{:ok, _embed} = Content.delete_embed(embed)
conn
|> put_flash(:info, "Embed deleted successfully.")
|> redirect(to: Routes.embed_path(conn, :index))
end
end
| 24.54 | 62 | 0.641402 |
ff497da130196ab2d45516a341bc7bd6a9d9f76e | 1,585 | exs | Elixir | mix.exs | nsb/spandex_phoenix | dee00efa8f8960d4364bde9f6f84caafac175dfa | [
"MIT"
] | null | null | null | mix.exs | nsb/spandex_phoenix | dee00efa8f8960d4364bde9f6f84caafac175dfa | [
"MIT"
] | null | null | null | mix.exs | nsb/spandex_phoenix | dee00efa8f8960d4364bde9f6f84caafac175dfa | [
"MIT"
] | null | null | null | defmodule SpandexPhoenix.MixProject do
use Mix.Project
@version "0.3.0"
def project do
[
app: :spandex_phoenix,
version: @version,
elixir: "~> 1.6",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: compilers(Mix.env()),
start_permanent: Mix.env() == :prod,
package: package(),
description: description(),
docs: docs(),
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [coveralls: :test, "coveralls.circle": :test]
]
end
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
name: :spandex_phoenix,
maintainers: ["Zachary Daniel", "Greg Mefford"],
licenses: ["MIT License"],
links: %{"GitHub" => "https://github.com/spandex-project/spandex_phoenix"}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp compilers(:test), do: [:phoenix] ++ Mix.compilers()
defp compilers(_), do: Mix.compilers()
defp description() do
"""
Tools for integrating Phoenix with Spandex.
"""
end
defp docs do
[
main: "readme",
extras: [
"README.md"
]
]
end
defp deps do
[
{:excoveralls, "~> 0.10", only: :test},
{:ex_doc, ">= 0.0.0", only: :dev},
{:git_ops, "~> 0.4.1", only: :dev},
{:inch_ex, github: "rrrene/inch_ex", only: [:dev, :test]},
{:phoenix, "~> 1.0", optional: true, only: [:dev, :test]},
{:plug, "~> 1.3"},
{:spandex, "~> 2.2"}
]
end
end
| 22.323944 | 80 | 0.557098 |
ff49a0afbb991d7baf15d3802615bef5b2a5f5ce | 3,106 | ex | Elixir | clients/run/lib/google_api/run/v1/model/secret_volume_source.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/secret_volume_source.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/secret_volume_source.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated model (see the header comment: "Do not edit this file
# manually"); any hand edits will be lost on regeneration.
defmodule GoogleApi.Run.V1.Model.SecretVolumeSource do
  @moduledoc """
  Cloud Run fully managed: not supported Cloud Run for Anthos: supported The contents of the target Secret's Data field will be presented in a volume as files using the keys in the Data field as the file names.
  ## Attributes
  *   `defaultMode` (*type:* `integer()`, *default:* `nil`) - (Optional) Cloud Run fully managed: not supported Cloud Run for Anthos: supported Mode bits to use on created files by default. Must be a value between 0 and 0777. Defaults to 0644. Directories within the path are not affected by this setting. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
  *   `items` (*type:* `list(GoogleApi.Run.V1.Model.KeyToPath.t)`, *default:* `nil`) - (Optional) Cloud Run fully managed: not supported Cloud Run for Anthos: supported If unspecified, each key-value pair in the Data field of the referenced Secret will be projected into the volume as a file whose name is the key and content is the value. If specified, the listed keys will be projected into the specified paths, and unlisted keys will not be present. If a key is specified which is not present in the Secret, the volume setup will error unless it is marked optional.
  *   `optional` (*type:* `boolean()`, *default:* `nil`) - (Optional) Cloud Run fully managed: not supported Cloud Run for Anthos: supported Specify whether the Secret or its keys must be defined.
  *   `secretName` (*type:* `String.t`, *default:* `nil`) - Cloud Run fully managed: not supported Cloud Run for Anthos: supported Name of the secret in the container's namespace to use.
  """
  # ModelBase provides the field/2 macro plus JSON (de)serialization helpers.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :defaultMode => integer(),
          :items => list(GoogleApi.Run.V1.Model.KeyToPath.t()),
          :optional => boolean(),
          :secretName => String.t()
        }
  field(:defaultMode)
  field(:items, as: GoogleApi.Run.V1.Model.KeyToPath, type: :list)
  field(:optional)
  field(:secretName)
end
# Poison protocol implementations delegate to the generated Gax helpers.
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.SecretVolumeSource do
  def decode(value, options) do
    GoogleApi.Run.V1.Model.SecretVolumeSource.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.SecretVolumeSource do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 55.464286 | 568 | 0.737605 |
ff49c2a8cc791bfa0ea1ccfb41ea8e6efd153582 | 1,657 | ex | Elixir | lib/community/news/news.ex | mdsebald/GraphQL_Demo | a326e784a81a824990381289146419b62bb19b7b | [
"Apache-2.0"
] | 1 | 2018-01-16T10:28:38.000Z | 2018-01-16T10:28:38.000Z | lib/community/news/news.ex | aleccool213/elixir-graphql-example | 8f89f4898cda7b7544321ff8dda2f76ea22d4c58 | [
"MIT"
] | null | null | null | lib/community/news/news.ex | aleccool213/elixir-graphql-example | 8f89f4898cda7b7544321ff8dda2f76ea22d4c58 | [
"MIT"
] | null | null | null | defmodule Community.News do
@moduledoc """
The News context.
"""
import Ecto.Query, warn: false
alias Community.Repo
alias Community.News.Link
@doc """
Returns the list of links.
## Examples
iex> list_links()
[%Link{}, ...]
"""
def list_links do
Repo.all(Link)
end
@doc """
Gets a single link.
Raises `Ecto.NoResultsError` if the Link does not exist.
## Examples
iex> get_link!(123)
%Link{}
iex> get_link!(456)
** (Ecto.NoResultsError)
"""
def get_link!(id), do: Repo.get!(Link, id)
@doc """
Creates a link.
## Examples
iex> create_link(%{field: value})
{:ok, %Link{}}
iex> create_link(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_link(attrs \\ %{}) do
%Link{}
|> Link.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a link.
## Examples
iex> update_link(link, %{field: new_value})
{:ok, %Link{}}
iex> update_link(link, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_link(%Link{} = link, attrs) do
link
|> Link.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Link.
## Examples
iex> delete_link(link)
{:ok, %Link{}}
iex> delete_link(link)
{:error, %Ecto.Changeset{}}
"""
def delete_link(%Link{} = link) do
Repo.delete(link)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking link changes.
## Examples
iex> change_link(link)
%Ecto.Changeset{source: %Link{}}
"""
def change_link(%Link{} = link) do
Link.changeset(link, %{})
end
end
| 15.780952 | 59 | 0.564273 |
ff49c80e07a8fdef2f63a124cd5668dabf325e63 | 596 | exs | Elixir | config/test_mysql_ex_money.exs | CoinbitsInc/ex_double_entry | 31adbcaa0be14fb4f4a725fbb9f3ce6ab8402636 | [
"MIT"
] | null | null | null | config/test_mysql_ex_money.exs | CoinbitsInc/ex_double_entry | 31adbcaa0be14fb4f4a725fbb9f3ce6ab8402636 | [
"MIT"
] | null | null | null | config/test_mysql_ex_money.exs | CoinbitsInc/ex_double_entry | 31adbcaa0be14fb4f4a725fbb9f3ce6ab8402636 | [
"MIT"
] | null | null | null | import Config
config :ex_double_entry,
db: :mysql,
money: :ex_money
config :ex_double_entry, ExDoubleEntry.Repo,
username: System.get_env("MYSQL_DB_USERNAME", "root"),
password: System.get_env("MYSQL_DB_PASSWORD", ""),
database: System.get_env("MYSQL_DB_NAME", "ex_double_entry_test"),
hostname: System.get_env("MYSQL_DB_HOST", "localhost"),
pool: Ecto.Adapters.SQL.Sandbox,
show_sensitive_data_on_connection_error: true,
timeout: :infinity,
queue_target: 200,
queue_interval: 10
config :logger, level: :info
config :ex_money,
default_cldr_backend: ExDoubleEntry.Cldr
| 27.090909 | 68 | 0.763423 |
ff49dccdc2551960440431784572a8c76865ab6b | 161 | ex | Elixir | lib/leather_web/controllers/page_controller.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 67 | 2016-10-24T04:11:40.000Z | 2021-11-25T16:46:51.000Z | lib/leather_web/controllers/page_controller.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 6 | 2017-08-17T21:43:50.000Z | 2021-11-03T13:13:49.000Z | lib/leather_web/controllers/page_controller.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 7 | 2017-08-13T01:43:37.000Z | 2022-01-11T04:38:27.000Z | defmodule LeatherWeb.PageController do
@moduledoc false
use LeatherWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 16.1 | 38 | 0.732919 |
ff49f89f58aa3db2ca999d20dfdab23d6d14628d | 1,585 | ex | Elixir | clients/people/lib/google_api/people/v1/model/batch_delete_contacts_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/people/lib/google_api/people/v1/model/batch_delete_contacts_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/people/lib/google_api/people/v1/model/batch_delete_contacts_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: generated model (see the header comment: "Do not edit this file
# manually"); any hand edits will be lost on regeneration.
defmodule GoogleApi.People.V1.Model.BatchDeleteContactsRequest do
  @moduledoc """
  A request to delete a batch of existing contacts.
  ## Attributes
  *   `resourceNames` (*type:* `list(String.t)`, *default:* `nil`) - Required. The resource names of the contact to delete. It's repeatable. Allows up to 500 resource names in a single request.
  """
  # ModelBase provides the field/2 macro plus JSON (de)serialization helpers.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :resourceNames => list(String.t()) | nil
        }
  field(:resourceNames, type: :list)
end
# Poison protocol implementations delegate to the generated Gax helpers.
defimpl Poison.Decoder, for: GoogleApi.People.V1.Model.BatchDeleteContactsRequest do
  def decode(value, options) do
    GoogleApi.People.V1.Model.BatchDeleteContactsRequest.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.People.V1.Model.BatchDeleteContactsRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.723404 | 193 | 0.744479 |
ff4a071ec9ae7a72d639892157a03ceed4b90b94 | 1,405 | exs | Elixir | prag-programing/Part1/ch13proj/issues/test/table_formatter_test.exs | mpahrens/fun-with-elixir | 8e6720c7c2004421f674bb450045df0ab59b9c74 | [
"Apache-2.0"
] | 1 | 2017-03-29T04:10:50.000Z | 2017-03-29T04:10:50.000Z | prag-programing/Part1/ch13proj/issues/test/table_formatter_test.exs | mpahrens/fun-with-elixir | 8e6720c7c2004421f674bb450045df0ab59b9c74 | [
"Apache-2.0"
] | null | null | null | prag-programing/Part1/ch13proj/issues/test/table_formatter_test.exs | mpahrens/fun-with-elixir | 8e6720c7c2004421f674bb450045df0ab59b9c74 | [
"Apache-2.0"
] | null | null | null | defmodule TableFormatterTest do
use ExUnit.Case
import ExUnit.CaptureIO #sends capture stuff to stdout
alias Issues.TableFormatter, as: TF
def simple_test_data do
[
[ c1: "r1 c1", c2: "r1 c2", c3: "r1 c3", c4: "r1+++c4"],
[ c1: "r2 c1", c2: "r2 c2", c3: "r2 c3", c4: "r2 c4"],
[ c1: "r3 c1", c2: "r3 c2", c3: "r3 c3", c4: "r3 c4"],
[ c1: "r4 c1", c2: "r4++c2", c3: "r4 c3", c4: "r4 c4"]
]
end
def headers, do: [ :c1, :c2, :c4 ]
def split_with_three_columns,
do: TF.split_into_columns(simple_test_data, headers)
test "split_into_columns" do
columns = split_with_three_columns
assert length(columns) == length(headers)
assert List.first(columns) == ["r1 c1", "r2 c1", "r3 c1", "r4 c1"]
assert List.last(columns) == ["r1+++c4", "r2 c4", "r3 c4", "r4 c4"]
end
test "column_widths" do
widths = TF.widths_of(split_with_three_columns)
assert widths == [5,6,7]
end
test "correct format string returned" do
assert TF.format_for([9,10,11]) == "~-9s | ~-10s | ~-11s~n"
end
test "Output is correct" do
result = capture_io fn ->
TF.print_table_for_columns(simple_test_data, headers)
end
assert result == """
c1 | c2 | c4
------+--------+--------
r1 c1 | r1 c2 | r1+++c4
r2 c1 | r2 c2 | r2 c4
r3 c1 | r3 c2 | r3 c4
r4 c1 | r4++c2 | r4 c4
"""
end
end
| 28.1 | 71 | 0.569395 |
ff4a116d5c1636224613c1e8660279e7b2e713e0 | 2,534 | exs | Elixir | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 1 | 2021-06-09T06:57:02.000Z | 2021-06-09T06:57:02.000Z | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | 6 | 2022-03-04T13:05:25.000Z | 2022-03-30T18:36:49.000Z | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | SaschaMann/elixir | 2489747bba72a0ba5efa27e7e00441a428fdf987 | [
"MIT"
] | null | null | null | defmodule RPNCalculator.OutputTest do
use ExUnit.Case
import ExUnit.CaptureIO
def open(filename \\ "<nil>") do
send(self(), {:open, filename})
case filename do
"filename" -> {:ok, :stdio}
"bad_filename" -> {:ok, spawn(fn -> nil end)}
end
end
def close(_) do
send(self(), :close)
:ok
end
describe "write/3" do
@task_id 1
test "returns ok tuple if function succeeds" do
resource = __MODULE__
filename = "filename"
equation = "1 1 +"
assert {:ok, equation} == RPNCalculator.Output.write(resource, filename, equation)
end
@use_open_error_message """
Use the open/1 function from the `resource` specified in the arguments to open `filename`.
E.g.) resource.open(filename)
"""
@task_id 1
test "opens resource" do
resource = __MODULE__
filename = "filename"
equation = "1 1 +"
RPNCalculator.Output.write(resource, filename, equation)
assert_received {:open, ^filename}, @use_open_error_message
end
@use_write_error_message """
Use IO.write/2 to write to the opened `filename`.
"""
@task_id 2
test "writes to resource" do
resource = __MODULE__
filename = "filename"
equation = "1 1 +"
assert capture_io(fn -> RPNCalculator.Output.write(resource, filename, equation) end) ==
"1 1 +",
@use_write_error_message
end
@task_id 2
test "rescues and returns error tuple from raised error" do
resource = __MODULE__
bad_filename = "bad_filename"
equation = "1 1 +"
assert {:error, "Unable to write to resource"} ==
RPNCalculator.Output.write(resource, bad_filename, equation)
end
@use_close_error_message """
Use the close/1 function from the `resource` specified in the arguments to close the opened file handle.
E.g.) resource.close(filename)
"""
@task_id 3
test "closes resource" do
resource = __MODULE__
filename = "filename"
equation = "1 1 +"
RPNCalculator.Output.write(resource, filename, equation)
assert_received :close, @use_close_error_message
end
@task_id 3
test "closes resource even when rescuing from raised error" do
resource = __MODULE__
bad_filename = "bad_filename"
equation = "1 1 +"
RPNCalculator.Output.write(resource, bad_filename, equation)
assert_received :close, "write/3 should close the `resource` even if an error is raised"
end
end
end
| 26.673684 | 108 | 0.640489 |
ff4a1e19f7d9686d3b1d80c692db92c7a0c07968 | 481 | ex | Elixir | lib/rube/erc20/event_handler.ex | icecube11/rube | 5d813c5b9f7b984786ac7db6fd2eca0939bf5468 | [
"MIT"
] | null | null | null | lib/rube/erc20/event_handler.ex | icecube11/rube | 5d813c5b9f7b984786ac7db6fd2eca0939bf5468 | [
"MIT"
] | null | null | null | lib/rube/erc20/event_handler.ex | icecube11/rube | 5d813c5b9f7b984786ac7db6fd2eca0939bf5468 | [
"MIT"
] | null | null | null | defmodule Rube.Erc20.EventHandler do
alias Rube.Tokens
alias Rube.Erc20.Events
def handle_event(blockchain, %{"address" => address}, %Events.Transfer{}) do
Tokens.get_or_fetch(blockchain.id, address)
end
def handle_event(blockchain, %{"address" => address}, %Events.Mint{}) do
Tokens.get_or_fetch(blockchain.id, address)
end
def handle_event(blockchain, %{"address" => address}, %Events.Burn{}) do
Tokens.get_or_fetch(blockchain.id, address)
end
end
| 28.294118 | 78 | 0.721414 |
ff4a24e7757436ba87971b96e93c6e4924a4f941 | 1,164 | exs | Elixir | test/lib/mix/tasks/get_address_test.exs | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
] | 5 | 2018-09-19T09:13:15.000Z | 2021-10-20T23:29:57.000Z | test/lib/mix/tasks/get_address_test.exs | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
] | 6 | 2018-07-29T05:33:02.000Z | 2018-09-18T20:42:19.000Z | test/lib/mix/tasks/get_address_test.exs | DenisGorbachev/crypto-cli | 94e5097ff24237fbc5fdd3fea371a5c9a1f727e4 | [
"MIT"
] | 3 | 2018-07-24T05:55:04.000Z | 2018-09-19T09:14:08.000Z | defmodule Mix.Tasks.Get.Address.Test do
use Cryptozaur.Case, async: true
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney, options: [clear_mock: true]
test "user can get a deposit address for the specific currency", %{opts: opts} do
use_cassette "tasks/get_address_ok", match_requests_on: [:query] do
result = Mix.Tasks.Get.Address.run(opts ++ ["leverex", "ETH_T"])
assert {:ok, address} = result
assert address == "8005b9ad313bd32118809d12dedb0c39eac1adda"
assert_received {:mix_shell, :info, [msg]}
assert String.contains?(msg, "[Deposit address: 8005b9ad313bd32118809d12dedb0c39eac1adda]")
end
end
test "user can see all active orders in JSON format", %{opts: opts} do
use_cassette "tasks/get_address_ok", match_requests_on: [:query] do
result = Mix.Tasks.Get.Address.run(opts ++ ["--format", "json", "leverex", "ETH_T"])
assert {:ok, address} = result
assert address == "8005b9ad313bd32118809d12dedb0c39eac1adda"
assert_received {:mix_shell, :info, [msg]}
assert String.contains?(msg, ~s|{\n \"address\": \"8005b9ad313bd32118809d12dedb0c39eac1adda\"\n}|)
end
end
end
| 40.137931 | 105 | 0.696735 |
ff4a29dd2d102380f6bf9a3ff2393b6bb95eac2c | 764 | ex | Elixir | lib/uni_pg/pg.ex | nallwhy/uni_pg | ad661d1e39a3020b74151293ae063334db4d45fc | [
"MIT"
] | 1 | 2021-08-06T04:56:15.000Z | 2021-08-06T04:56:15.000Z | lib/uni_pg/pg.ex | nallwhy/uni_pg | ad661d1e39a3020b74151293ae063334db4d45fc | [
"MIT"
] | null | null | null | lib/uni_pg/pg.ex | nallwhy/uni_pg | ad661d1e39a3020b74151293ae063334db4d45fc | [
"MIT"
] | 1 | 2021-07-12T14:12:12.000Z | 2021-07-12T14:12:12.000Z | defmodule UniPg.Pg do
def start_link(scope) do
:pg.start_link(scope)
end
def join(scope, group, pids) when is_list(pids) do
ensure_started(scope)
:pg.join(scope, group, pids)
end
def leave(scope, group, pids) when is_list(pids) do
ensure_started(scope)
:pg.leave(scope, group, pids)
end
def get_local_members(scope, group) do
ensure_started(scope)
:pg.get_local_members(scope, group)
end
def get_members(scope, group) do
ensure_started(scope)
:pg.get_members(scope, group)
end
def which_groups(scope) do
ensure_started(scope)
:pg.which_groups(scope)
end
# Util
defp ensure_started(scope) do
# Duplicated :pg.start() has no effect and takes only 5us.
:pg.start(scope)
end
end
| 20.105263 | 62 | 0.689791 |
ff4a2a842e49b4a0f2c61c159f6d66a97e0f1bbe | 478 | exs | Elixir | priv/repo/migrations/20210205030432_create_submission_votes_table.exs | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 27 | 2018-01-20T05:56:24.000Z | 2021-05-24T03:21:55.000Z | priv/repo/migrations/20210205030432_create_submission_votes_table.exs | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 731 | 2018-04-16T13:25:49.000Z | 2021-06-22T07:16:12.000Z | priv/repo/migrations/20210205030432_create_submission_votes_table.exs | source-academy/cadet | c447552453f78799755de73f66999e4c9d20383c | [
"Apache-2.0"
] | 43 | 2018-01-20T06:35:46.000Z | 2021-05-05T03:22:35.000Z | defmodule Cadet.Repo.Migrations.AddSubmissionVotesTable do
use Ecto.Migration
def change do
create table(:submission_votes) do
add(:rank, :integer)
add(:user_id, references(:users), null: false)
add(:submission_id, references(:submissions), null: false)
add(:question_id, references(:questions), null: false)
timestamps()
end
create(unique_index(:submission_votes, [:user_id, :question_id, :rank], name: :unique_score))
end
end
| 29.875 | 97 | 0.702929 |
ff4a5abeb8572940e89139f292e3c93a002903e9 | 2,151 | ex | Elixir | lib/instream/response.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | lib/instream/response.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | lib/instream/response.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | defmodule Instream.Response do
@moduledoc false
@type t :: {:error, term} | {status :: pos_integer, headers :: list, body :: String.t()}
@doc """
Maybe parses a response based on the requested result type.
"""
@spec maybe_parse(t, Keyword.t()) :: any
def maybe_parse({:error, _} = error, _), do: error
def maybe_parse({_, _, ""}, _), do: :ok
def maybe_parse({status, headers, body}, opts)
when 300 <= status do
case is_json?(headers) do
true -> maybe_decode_json(body, opts)
false -> maybe_wrap_error(body, opts)
end
end
def maybe_parse({_, headers, body}, opts) do
case is_json?(headers) do
true -> maybe_decode_json(body, opts)
false -> body
end
end
@doc """
Parses the response of a ping query.
"""
@spec parse_ping(any) :: :pong | :error
def parse_ping({:ok, 204, _}), do: :pong
def parse_ping(_), do: :error
@doc """
Parses the response of a version query.
Returns "unknown" if the response did not contain a parseable header.
"""
@spec parse_version(any) :: String.t() | :error
def parse_version({:ok, 204, headers}) do
case List.keyfind(headers, "X-Influxdb-Version", 0) do
{"X-Influxdb-Version", version} -> version
_ -> "unknown"
end
end
def parse_version(_), do: :error
@doc """
Parses the response of a status query.
"""
@spec parse_status(any) :: :ok | :error
def parse_status({:ok, 204, _}), do: :ok
def parse_status(_), do: :error
# Internal methods
defp is_json?([]), do: false
defp is_json?([{header, val} | headers]) do
if "content-type" == String.downcase(header) do
String.contains?(val, "json")
else
is_json?(headers)
end
end
defp maybe_decode_json(response, opts) do
case opts[:result_as] do
:raw ->
response
_ ->
{json_mod, json_fun, json_extra_args} = opts[:json_decoder]
apply(json_mod, json_fun, [response | json_extra_args])
end
end
defp maybe_wrap_error(error, opts) do
error = String.trim(error)
case opts[:result_as] do
:raw -> error
_ -> %{error: error}
end
end
end
| 23.9 | 90 | 0.620177 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.