hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c9047d3cee1ec23211f8754f0d34d8d28a5f5d3 | 989 | exs | Elixir | config/config.exs | kawakami-o3/epcc | e0a102b5471e133580c92feb91ec9597f1cf7021 | [
"MIT"
] | null | null | null | config/config.exs | kawakami-o3/epcc | e0a102b5471e133580c92feb91ec9597f1cf7021 | [
"MIT"
] | 3 | 2020-07-17T03:56:23.000Z | 2021-05-09T00:04:45.000Z | config/config.exs | kawakami-o3/epcc | e0a102b5471e133580c92feb91ec9597f1cf7021 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :epcc,
ecto_repos: [Epcc.Repo]
# Configures the endpoint
config :epcc, EpccWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "f+omJMD19iXVHFPxdywQ2wgwOZwPJfcQ/QtzZklP3wS2HoZ2eWVNKdVYbfA9yQYH",
render_errors: [view: EpccWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Epcc.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 31.903226 | 86 | 0.76542 |
1c904972cad6a1bccd7d162708407a1dbe1a205d | 1,875 | exs | Elixir | test/json_schema_test_suite/draft4/optional/format/date_time_test.exs | kianmeng/xema | a990d64fb4bcd708249514daa55426ee003da25d | [
"MIT"
] | 49 | 2018-06-05T09:42:19.000Z | 2022-02-15T12:50:51.000Z | test/json_schema_test_suite/draft4/optional/format/date_time_test.exs | kianmeng/xema | a990d64fb4bcd708249514daa55426ee003da25d | [
"MIT"
] | 152 | 2017-06-11T13:43:06.000Z | 2022-01-09T17:13:45.000Z | test/json_schema_test_suite/draft4/optional/format/date_time_test.exs | kianmeng/xema | a990d64fb4bcd708249514daa55426ee003da25d | [
"MIT"
] | 6 | 2019-05-31T05:41:47.000Z | 2021-12-14T08:09:36.000Z | defmodule JsonSchemaTestSuite.Draft4.Optional.Format.DateTimeTest do
use ExUnit.Case
import Xema, only: [valid?: 2]
describe ~s|validation of date-time strings| do
setup do
%{
schema:
Xema.from_json_schema(
%{"format" => "date-time"},
draft: "draft4",
atom: :force
)
}
end
test ~s|a valid date-time string|, %{schema: schema} do
assert valid?(schema, "1963-06-19T08:30:06.283185Z")
end
test ~s|a valid date-time string without second fraction|, %{schema: schema} do
assert valid?(schema, "1963-06-19T08:30:06Z")
end
test ~s|a valid date-time string with plus offset|, %{schema: schema} do
assert valid?(schema, "1937-01-01T12:00:27.87+00:20")
end
test ~s|a valid date-time string with minus offset|, %{schema: schema} do
assert valid?(schema, "1990-12-31T15:59:50.123-08:00")
end
test ~s|a invalid day in date-time string|, %{schema: schema} do
refute valid?(schema, "1990-02-31T15:59:60.123-08:00")
end
test ~s|an invalid offset in date-time string|, %{schema: schema} do
refute valid?(schema, "1990-12-31T15:59:60-24:00")
end
test ~s|an invalid date-time string|, %{schema: schema} do
refute valid?(schema, "06/19/1963 08:30:06 PST")
end
test ~s|case-insensitive T and Z|, %{schema: schema} do
assert valid?(schema, "1963-06-19t08:30:06.283185z")
end
test ~s|only RFC3339 not all of ISO 8601 are valid|, %{schema: schema} do
refute valid?(schema, "2013-350T01:01:01")
end
test ~s|invalid non-padded month dates|, %{schema: schema} do
refute valid?(schema, "1963-6-19T08:30:06.283185Z")
end
test ~s|invalid non-padded day dates|, %{schema: schema} do
refute valid?(schema, "1963-06-1T08:30:06.283185Z")
end
end
end
| 29.761905 | 83 | 0.624 |
1c905c63901f8a88928209f91d93d6172272934c | 6,044 | exs | Elixir | test/phoenix_html/tag_test.exs | eksperimental/phoenix_html | 03fc1949bfc14802fc9fe924fd77f94359dbbcdf | [
"MIT"
] | null | null | null | test/phoenix_html/tag_test.exs | eksperimental/phoenix_html | 03fc1949bfc14802fc9fe924fd77f94359dbbcdf | [
"MIT"
] | null | null | null | test/phoenix_html/tag_test.exs | eksperimental/phoenix_html | 03fc1949bfc14802fc9fe924fd77f94359dbbcdf | [
"MIT"
] | null | null | null | defmodule Phoenix.HTML.TagTest do
use ExUnit.Case, async: true
import Phoenix.HTML
import Phoenix.HTML.Tag
doctest Phoenix.HTML.Tag
test "tag" do
assert tag(:br) |> safe_to_string() == ~s(<br>)
assert tag(:input, name: ~s("<3")) |> safe_to_string() == ~s(<input name=""<3"">)
assert tag(:input, name: raw("<3")) |> safe_to_string() == ~s(<input name="<3">)
assert tag(:input, name: :hello) |> safe_to_string() == ~s(<input name="hello">)
assert tag(:input, type: "text", name: "user_id") |> safe_to_string() ==
~s(<input name="user_id" type="text">)
assert tag(:input, data: [toggle: "dropdown"]) |> safe_to_string() ==
~s(<input data-toggle="dropdown">)
assert tag(:input, my_attr: "blah") |> safe_to_string() == ~s(<input my-attr="blah">)
assert tag(:input, data: [my_attr: "blah"]) |> safe_to_string() ==
~s(<input data-my-attr="blah">)
assert tag(:input, data: [toggle: [target: "#parent", attr: "blah"]]) |> safe_to_string() ==
~s(<input data-toggle-attr="blah" data-toggle-target="#parent">)
assert tag(:audio, autoplay: "autoplay") |> safe_to_string() ==
~s(<audio autoplay="autoplay">)
assert tag(:audio, autoplay: true) |> safe_to_string() == ~s(<audio autoplay>)
assert tag(:audio, autoplay: false) |> safe_to_string() == ~s(<audio>)
assert tag(:audio, autoplay: nil) |> safe_to_string() == ~s(<audio>)
end
test "content_tag" do
assert content_tag(:p, "Hello") |> safe_to_string() == "<p>Hello</p>"
assert content_tag(:p, "Hello", class: "dark") |> safe_to_string() ==
"<p class=\"dark\">Hello</p>"
assert content_tag(:p, [class: "dark"], do: "Hello") |> safe_to_string() ==
"<p class=\"dark\">Hello</p>"
assert content_tag(:p, "<Hello>") |> safe_to_string() == "<p><Hello></p>"
assert content_tag(:p, 13) |> safe_to_string() == "<p>13</p>"
assert content_tag(:p, [class: "dark"], do: "<Hello>") |> safe_to_string() ==
"<p class=\"dark\"><Hello></p>"
assert content_tag(:p, raw("<Hello>")) |> safe_to_string() == "<p><Hello></p>"
assert content_tag(:p, [class: "dark"], do: raw("<Hello>")) |> safe_to_string() ==
"<p class=\"dark\"><Hello></p>"
content =
content_tag :form, action: "/users", data: [remote: true] do
tag(:input, name: "user[name]")
end
assert safe_to_string(content) ==
~s(<form action="/users" data-remote="true">) <> ~s(<input name="user[name]"></form>)
assert content_tag(:p, do: "Hello") |> safe_to_string() == "<p>Hello</p>"
content =
content_tag :ul do
content_tag :li do
"Hello"
end
end
assert safe_to_string(content) == "<ul><li>Hello</li></ul>"
assert content_tag(:p, ["hello", ?\s, "world"]) |> safe_to_string() == "<p>hello world</p>"
assert content_tag(:div, [autoplay: "autoplay"], do: "") |> safe_to_string() ==
~s(<div autoplay="autoplay"></div>)
assert content_tag(:div, [autoplay: true], do: "") |> safe_to_string() ==
~s(<div autoplay></div>)
assert content_tag(:div, [autoplay: false], do: "") |> safe_to_string() == ~s(<div></div>)
assert content_tag(:div, [autoplay: nil], do: "") |> safe_to_string() == ~s(<div></div>)
end
test "img_tag" do
assert img_tag("user.png") |> safe_to_string() == ~s(<img src="user.png">)
assert img_tag("user.png", class: "big") |> safe_to_string() ==
~s(<img class="big" src="user.png">)
assert img_tag("user.png", srcset: %{"big.png" => "2x", "small.png" => "1x"})
|> safe_to_string() ==
~s(<img src="user.png" srcset="big.png 2x, small.png 1x">)
assert img_tag("user.png", srcset: [{"big.png", "2x"}, "small.png"]) |> safe_to_string() ==
~s(<img src="user.png" srcset="big.png 2x, small.png">)
assert img_tag("user.png", srcset: "big.png 2x, small.png") |> safe_to_string() ==
~s[<img src="user.png" srcset="big.png 2x, small.png">]
end
test "form_tag for get" do
assert safe_to_string(form_tag("/", method: :get)) ==
~s(<form action="/" method="get">)
assert safe_to_string(form_tag("/", method: :get)) ==
~s(<form action="/" method="get">)
end
test "form_tag for post" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(form_tag("/")) ==
~s(<form action="/" method="post">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">)
assert safe_to_string(form_tag("/", method: :post, csrf_token: false, multipart: true)) ==
~s(<form action="/" enctype="multipart/form-data" method="post">)
end
test "form_tag for other method" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(form_tag("/", method: :put)) ==
~s(<form action="/" method="post">) <>
~s(<input name="_method" type="hidden" value="put">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">)
end
test "form_tag with do block" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(
form_tag "/" do
"<>"
end
) ==
~s(<form action="/" method="post">) <>
~s(<input name="_csrf_token" type="hidden" value="#{csrf_token}">) <>
~s(<>) <> ~s(</form>)
assert safe_to_string(
form_tag "/", method: :get do
"<>"
end
) ==
~s(<form action="/" method="get">) <>
~s(<>) <> ~s(</form>)
end
test "csrf_meta_tag" do
csrf_token = Plug.CSRFProtection.get_csrf_token()
assert safe_to_string(csrf_meta_tag()) ==
~s(<meta charset="UTF-8" content="#{csrf_token}" csrf-param="_csrf_token" method-param="_method" name="csrf-token">)
end
end
| 37.079755 | 129 | 0.560887 |
1c9074dff7013312c0cf4a74375096b95a78b104 | 6,891 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/health_check.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/health_check.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/health_check.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.HealthCheck do
@moduledoc """
Represents a Health Check resource.
Google Compute Engine has two Health Check resources:
* [Global](/compute/docs/reference/rest/{$api_version}/healthChecks) * [Regional](/compute/docs/reference/rest/{$api_version}/regionHealthChecks)
Internal HTTP(S) load balancers must use regional health checks (`compute.v1.regionHealthChecks`).
Traffic Director must use global health checks (`compute.v1.HealthChecks`).
Internal TCP/UDP load balancers can use either regional or global health checks (`compute.v1.regionHealthChecks` or `compute.v1.HealthChecks`).
External HTTP(S), TCP proxy, and SSL proxy load balancers as well as managed instance group auto-healing must use global health checks (`compute.v1.HealthChecks`).
Network load balancers must use legacy HTTP health checks (httpHealthChecks).
For more information, see Health checks overview.
## Attributes
* `checkIntervalSec` (*type:* `integer()`, *default:* `nil`) - How often (in seconds) to send a health check. The default value is 5 seconds.
* `creationTimestamp` (*type:* `String.t`, *default:* `nil`) - [Output Only] Creation timestamp in 3339 text format.
* `description` (*type:* `String.t`, *default:* `nil`) - An optional description of this resource. Provide this property when you create the resource.
* `grpcHealthCheck` (*type:* `GoogleApi.Compute.V1.Model.GRPCHealthCheck.t`, *default:* `nil`) -
* `healthyThreshold` (*type:* `integer()`, *default:* `nil`) - A so-far unhealthy instance will be marked healthy after this many consecutive successes. The default value is 2.
* `http2HealthCheck` (*type:* `GoogleApi.Compute.V1.Model.HTTP2HealthCheck.t`, *default:* `nil`) -
* `httpHealthCheck` (*type:* `GoogleApi.Compute.V1.Model.HTTPHealthCheck.t`, *default:* `nil`) -
* `httpsHealthCheck` (*type:* `GoogleApi.Compute.V1.Model.HTTPSHealthCheck.t`, *default:* `nil`) -
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `kind` (*type:* `String.t`, *default:* `compute#healthCheck`) - Type of the resource.
* `logConfig` (*type:* `GoogleApi.Compute.V1.Model.HealthCheckLogConfig.t`, *default:* `nil`) - Configure logging on this health check.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. For example, a name that is 1-63 characters long, matches the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`, and otherwise complies with RFC1035. This regular expression describes a name where the first character is a lowercase letter, and all following characters are a dash, lowercase letter, or digit, except the last character, which isn't a dash.
* `region` (*type:* `String.t`, *default:* `nil`) - [Output Only] Region where the health check resides. Not applicable to global health checks.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for the resource.
* `sslHealthCheck` (*type:* `GoogleApi.Compute.V1.Model.SSLHealthCheck.t`, *default:* `nil`) -
* `tcpHealthCheck` (*type:* `GoogleApi.Compute.V1.Model.TCPHealthCheck.t`, *default:* `nil`) -
* `timeoutSec` (*type:* `integer()`, *default:* `nil`) - How long (in seconds) to wait before claiming failure. The default value is 5 seconds. It is invalid for timeoutSec to have greater value than checkIntervalSec.
* `type` (*type:* `String.t`, *default:* `nil`) - Specifies the type of the healthCheck, either TCP, SSL, HTTP, HTTPS or HTTP2. If not specified, the default is TCP. Exactly one of the protocol-specific health check field must be specified, which must match type field.
* `unhealthyThreshold` (*type:* `integer()`, *default:* `nil`) - A so-far healthy instance will be marked unhealthy after this many consecutive failures. The default value is 2.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:checkIntervalSec => integer(),
:creationTimestamp => String.t(),
:description => String.t(),
:grpcHealthCheck => GoogleApi.Compute.V1.Model.GRPCHealthCheck.t(),
:healthyThreshold => integer(),
:http2HealthCheck => GoogleApi.Compute.V1.Model.HTTP2HealthCheck.t(),
:httpHealthCheck => GoogleApi.Compute.V1.Model.HTTPHealthCheck.t(),
:httpsHealthCheck => GoogleApi.Compute.V1.Model.HTTPSHealthCheck.t(),
:id => String.t(),
:kind => String.t(),
:logConfig => GoogleApi.Compute.V1.Model.HealthCheckLogConfig.t(),
:name => String.t(),
:region => String.t(),
:selfLink => String.t(),
:sslHealthCheck => GoogleApi.Compute.V1.Model.SSLHealthCheck.t(),
:tcpHealthCheck => GoogleApi.Compute.V1.Model.TCPHealthCheck.t(),
:timeoutSec => integer(),
:type => String.t(),
:unhealthyThreshold => integer()
}
field(:checkIntervalSec)
field(:creationTimestamp)
field(:description)
field(:grpcHealthCheck, as: GoogleApi.Compute.V1.Model.GRPCHealthCheck)
field(:healthyThreshold)
field(:http2HealthCheck, as: GoogleApi.Compute.V1.Model.HTTP2HealthCheck)
field(:httpHealthCheck, as: GoogleApi.Compute.V1.Model.HTTPHealthCheck)
field(:httpsHealthCheck, as: GoogleApi.Compute.V1.Model.HTTPSHealthCheck)
field(:id)
field(:kind)
field(:logConfig, as: GoogleApi.Compute.V1.Model.HealthCheckLogConfig)
field(:name)
field(:region)
field(:selfLink)
field(:sslHealthCheck, as: GoogleApi.Compute.V1.Model.SSLHealthCheck)
field(:tcpHealthCheck, as: GoogleApi.Compute.V1.Model.TCPHealthCheck)
field(:timeoutSec)
field(:type)
field(:unhealthyThreshold)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.HealthCheck do
def decode(value, options) do
GoogleApi.Compute.V1.Model.HealthCheck.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.HealthCheck do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 58.897436 | 546 | 0.711218 |
1c908dc4c5a004a2e8f042df3ad386d27eac222e | 146 | exs | Elixir | test/ironman_test.exs | TheEndIsNear/ironman | 769e6777e93759810b168e7b6819ef7cb9ab5f80 | [
"MIT"
] | 39 | 2019-02-19T21:59:35.000Z | 2021-03-31T21:43:58.000Z | test/ironman_test.exs | TheEndIsNear/ironman | 769e6777e93759810b168e7b6819ef7cb9ab5f80 | [
"MIT"
] | 17 | 2019-03-13T15:02:52.000Z | 2020-11-09T20:33:44.000Z | test/ironman_test.exs | TheEndIsNear/ironman | 769e6777e93759810b168e7b6819ef7cb9ab5f80 | [
"MIT"
] | 5 | 2020-01-30T13:51:15.000Z | 2020-10-25T18:27:59.000Z | defmodule IronmanTest do
use ExUnit.Case
# doctest Ironman
# test "greets the world" do
# assert Ironman.hello() == :world
# end
end
| 16.222222 | 38 | 0.671233 |
1c908fa04993d1871ca94c33b306c65e6ee22d5e | 1,250 | exs | Elixir | priv/example-projects/foo_bar_umbrella/config/prod.secret.exs | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | priv/example-projects/foo_bar_umbrella/config/prod.secret.exs | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | priv/example-projects/foo_bar_umbrella/config/prod.secret.exs | c4710n/phx_custom | 19080ed8896be9ae846d12b2f631a1bc4aef5c78 | [
"MIT"
] | null | null | null | # In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
config :foo_bar, FooBar.Repo,
# ssl: true,
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
config :foo_bar_web, FooBarWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :foo_bar_web, FooBarWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 29.761905 | 67 | 0.72 |
1c90d3b0e7b17a8717f62d98b6aaa98d20f44cc2 | 1,183 | ex | Elixir | lib/mix/lib/mix/tasks/loadpaths.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/mix/lib/mix/tasks/loadpaths.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/loadpaths.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Loadpaths do
use Mix.Task
@moduledoc """
Load the application and its dependencies paths.
## Command line options
* `--no-elixir-version-check` - do not check elixir version
"""
def run(args) do
{ opts, _, _ } = OptionParser.parse(args)
unless opts[:no_elixir_version_check] do
config = Mix.project
if req = config[:elixir] do
case Version.parse_requirement(req) do
{ :ok, req } ->
unless Version.match?(System.version, req) do
raise Mix.ElixirVersionError, target: config[:app] || Mix.Project.get,
expected: req,
actual: System.version
end
:error ->
raise Mix.Error, message: "Invalid Elixir version requirement #{req} in mix.exs file"
end
end
end
# Force recompile if we have a version mismatch.
# Skip it for umbrella apps since they have no build.
old_vsn = Mix.Dep.Lock.elixir_vsn
if old_vsn && old_vsn != System.version, do: Mix.Dep.Lock.touch
Enum.each Mix.Project.load_paths, &Code.prepend_path(&1)
end
end
| 29.575 | 97 | 0.593407 |
1c90ddcfa798e966fdd62577d9a159399f9663dc | 187 | ex | Elixir | test/support/post_with_nanoid.ex | chaitanyapandit/ecto_identifier | ca04e649064e1c2aa6b05d616031802636a9e18a | [
"Apache-2.0"
] | 15 | 2018-09-23T17:13:07.000Z | 2021-11-09T14:54:05.000Z | test/support/post_with_nanoid.ex | chaitanyapandit/ecto_identifier | ca04e649064e1c2aa6b05d616031802636a9e18a | [
"Apache-2.0"
] | 3 | 2021-05-17T16:01:18.000Z | 2021-06-13T20:29:18.000Z | test/support/post_with_nanoid.ex | chaitanyapandit/ecto_identifier | ca04e649064e1c2aa6b05d616031802636a9e18a | [
"Apache-2.0"
] | 5 | 2019-06-21T11:41:36.000Z | 2020-12-27T02:38:32.000Z | defmodule EctoIdentifier.PostWithNanoid do
@moduledoc false
use Ecto.Schema
alias Ecto.Nanoid
schema("posts_nanoid") do
field(:number, Nanoid, autogenerate: true)
end
end
| 17 | 46 | 0.748663 |
1c90de616800846e92621aaf615b34ae3064daa1 | 803 | exs | Elixir | apps/ewallet_config/test/ewallet_config/utils/helpers/input_attribute_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | 1 | 2018-12-07T06:21:21.000Z | 2018-12-07T06:21:21.000Z | apps/ewallet_config/test/ewallet_config/utils/helpers/input_attribute_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_config/test/ewallet_config/utils/helpers/input_attribute_test.exs | amadeobrands/ewallet | 505b7822721940a7b892a9b35c225e80cc8ac0b4 | [
"Apache-2.0"
] | null | null | null | defmodule EWalletConfig.Helpers.InputAttributeTest do
use ExUnit.Case
alias EWalletConfig.Helpers.InputAttribute
describe "get/2" do
test "returns the value if the map key is atom and argument is atom" do
assert InputAttribute.get(%{match: "matched"}, :match) == "matched"
end
test "returns the value if the map key is string and argument is atom" do
assert InputAttribute.get(%{"match" => "matched"}, :match) == "matched"
end
test "returns the value if the map key is string and argument is string" do
assert InputAttribute.get(%{"match" => "matched"}, "match") == "matched"
end
test "returns the value if the map key is atom and argument is string" do
assert InputAttribute.get(%{match: "matched"}, "match") == "matched"
end
end
end
| 34.913043 | 79 | 0.683686 |
1c911de7ec89f3b6ca4a46da6ba4549bdbffc8ea | 7,342 | exs | Elixir | .credo.exs | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | null | null | null | .credo.exs | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | 4 | 2021-08-11T03:19:33.000Z | 2021-09-26T01:29:58.000Z | .credo.exs | AkioCode/elxpro-blog | 236984915851b91058e091414deb70c5e8fed72a | [
"MIT"
] | null | null | null | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: [
"lib/",
"src/",
"test/",
"web/",
"apps/*/lib/",
"apps/*/src/",
"apps/*/test/",
"apps/*/web/"
],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# Load and configure plugins here:
#
plugins: [],
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: true,
#
# To modify the timeout for parsing files, change this value:
#
parse_timeout: 5000,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, [exit_status: 2]},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 100]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MapInto, false},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
#
## Warnings
#
{Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
{Credo.Check.Warning.LazyLogging, false},
{Credo.Check.Warning.MixEnv, false},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
{Credo.Check.Warning.UnsafeExec, []},
#
# Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
{Credo.Check.Readability.BlockPipe, false},
{Credo.Check.Readability.ImplTrue, false},
{Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.SeparateAliasRequire, false},
{Credo.Check.Readability.SinglePipe, false},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.StrictModuleLayout, false},
{Credo.Check.Readability.WithCustomTaggedTuple, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.NegatedIsNil, false},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.LeakyEnvironment, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Warning.UnsafeToAtom, false}
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 38.642105 | 97 | 0.61441 |
1c91227b4e5bbdb7d517ee76b08ee668a7ddda2f | 2,307 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_stored_info_type.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_stored_info_type.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_stored_info_type.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoType do
  @moduledoc """
  StoredInfoType resource message that contains information about the current version and any pending updates.
  ## Attributes
  * `currentVersion` (*type:* `GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion.t`, *default:* `nil`) - Current version of the stored info type.
  * `name` (*type:* `String.t`, *default:* `nil`) - Resource name.
  * `pendingVersions` (*type:* `list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion.t)`, *default:* `nil`) - Pending versions of the stored info type. Empty if no versions are pending.
  """
  # Auto-generated model. ModelBase presumably supplies the struct definition,
  # a decode/2 function, and the field/2,3 macro used below — TODO confirm.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :currentVersion =>
            GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion.t() | nil,
          :name => String.t() | nil,
          :pendingVersions =>
            list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion.t()) | nil
        }
  # field/2,3 registers how each JSON key maps onto the struct; :pendingVersions
  # is declared as a list of nested StoredInfoTypeVersion models.
  field(:currentVersion, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion)
  field(:name)
  field(:pendingVersions,
    as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoTypeVersion,
    type: :list
  )
end
# Poison decode support: hand the raw decoded map to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoType do
  def decode(value, options),
    do: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoType.decode(value, options)
end
# Poison encode support: encoding is shared by all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2StoredInfoType do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.101695 | 202 | 0.742523 |
1c91291c4e6043c24616b7f05a26a87889138fea | 3,552 | exs | Elixir | test/scenic/view_port/tables_test.exs | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | test/scenic/view_port/tables_test.exs | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | test/scenic/view_port/tables_test.exs | zacck/scenic | 5f2170b7fba63b0af597ddeb3107fb1ffb79c2fe | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on August 22, 2018.
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
# ==============================================================================
defmodule Scenic.ViewPort.TablesTest do
  # End-to-end exercise of the graph/scene registration tables.
  # async: false because the test talks to a globally named Tables process
  # and reads a shared named ETS table directly.
  use ExUnit.Case, async: false
  doctest Scenic.ViewPort.Tables
  alias Scenic.Graph
  alias Scenic.ViewPort.Tables
  import Scenic.Primitives
  # ets table names
  @ets_graphs_table :_scenic_graphs_table_
  # Two minimal graphs used as before/after payloads in the assertions below.
  @graph Graph.build()
         |> text("Main Graph")
  @graph_1 Graph.build()
           |> text("Second Graph")
  # --------------------------------------------------------
  setup do
    {:ok, svc} = Tables.start_link(nil)
    on_exit(fn -> Process.exit(svc, :normal) end)
    %{svc: svc}
  end
  # ============================================================================
  # integration style tests
  test "integration style test" do
    # Registration is {pid, pid, pid}; the two throwaway agents stand in for
    # auxiliary processes — their exact roles are not visible here.
    {:ok, agent_0} = Agent.start(fn -> 1 + 1 end)
    {:ok, agent_1} = Agent.start(fn -> 1 + 1 end)
    scene_ref = make_ref()
    graph_key = {:graph, scene_ref, 123}
    registration = {self(), agent_0, agent_1}
    # register
    Tables.register_scene(scene_ref, registration)
    # is an async cast, so sleep to let it run
    Process.sleep(100)
    # confirm the registration by checking the scene
    assert Tables.get_scene_pid(scene_ref) == {:ok, self()}
    assert Tables.get_scene_pid(graph_key) == {:ok, self()}
    # insert a graph
    Tables.insert_graph(graph_key, self(), @graph, [])
    # not subscribed, so confirm no event received - also gives it time to process
    refute_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph}
    assert Tables.get_refs(graph_key) == {:ok, []}
    assert Tables.get_graph_refs(graph_key) == {:ok, @graph, []}
    # subscribe to the graph_key
    Tables.subscribe(graph_key, self())
    # update the graph
    Tables.insert_graph(graph_key, self(), @graph_1, [])
    # subscribed. confirm event received - also gives it time to process
    assert_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph_1, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph_1}
    # unsubscribe to the graph_key
    Tables.unsubscribe(graph_key, self())
    # confirm unsubscripted
    Tables.insert_graph(graph_key, self(), @graph, [])
    # not subscribed, so confirm no event received - also gives it time to process
    refute_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph, []}]
    # subscribe to the graph_key again
    Tables.subscribe(graph_key, self())
    # update the graph
    Tables.insert_graph(graph_key, self(), @graph_1, [])
    # subscribed. confirm event received - also gives it time to process
    assert_receive({:"$gen_cast", {:update_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == [{graph_key, self(), @graph_1, []}]
    assert Tables.get_graph(graph_key) == {:ok, @graph_1}
    # delete the graph
    Tables.delete_graph(graph_key)
    assert_receive({:"$gen_cast", {:delete_graph, {:graph, ^scene_ref, 123}}})
    assert :ets.lookup(@ets_graphs_table, graph_key) == []
    assert Tables.get_graph(graph_key) == nil
    Agent.stop(agent_0)
    Agent.stop(agent_1)
  end
end
| 35.878788 | 91 | 0.626971 |
1c9150814b0226cd6eeb4c6f20f80763dd86c9dc | 1,506 | ex | Elixir | lib/tipalti/api/soap/response_parser.ex | dhedlund/tipalti-elixir | 8d351261234591bf328309b404324ebb2aa1c4b2 | [
"MIT"
] | null | null | null | lib/tipalti/api/soap/response_parser.ex | dhedlund/tipalti-elixir | 8d351261234591bf328309b404324ebb2aa1c4b2 | [
"MIT"
] | null | null | null | lib/tipalti/api/soap/response_parser.ex | dhedlund/tipalti-elixir | 8d351261234591bf328309b404324ebb2aa1c4b2 | [
"MIT"
] | null | null | null | defmodule Tipalti.API.SOAP.ResponseParser do
@moduledoc false
import SweetXml
alias Tipalti.ClientError
def parse(body, root_path, :empty, response_opts) do
with :ok <- is_ok?(body, root_path, response_opts) do
:ok
end
end
def parse(body, root_path, %SweetXpath{} = path, response_opts) do
document = xpath(body, ~x"/"e)
with :ok <- is_ok?(document, root_path, response_opts) do
element = xpath(document, root_path)
{:ok, xpath(element, path)}
end
end
def parse(body, root_path, [%SweetXpath{} = path | mapping], response_opts) do
document = xpath(body, ~x"/"e)
with :ok <- is_ok?(document, root_path, response_opts) do
element = xpath(document, root_path)
{:ok, xpath(element, path, mapping)}
end
end
def parse(body, root_path, mapping, response_opts) do
document = xpath(body, ~x"/"e)
with :ok <- is_ok?(document, root_path, response_opts) do
{:ok, xpath(document, root_path, mapping)}
end
end
def parse_without_errors(body, root_path, [path | mapping]) do
document = xpath(body, ~x"/"e)
element = xpath(document, root_path)
xpath(element, path, mapping)
end
defp is_ok?(document, root_path, response_opts) do
ok_code = response_opts[:ok_code]
error_paths = response_opts[:error_paths]
case xpath(document, root_path, error_paths) do
%{error_code: ^ok_code} ->
:ok
error ->
{:error, ClientError.from_map!(error)}
end
end
end
| 25.525424 | 80 | 0.658035 |
1c9178376fdc8ef0affa01915256bc37f5afa4bd | 1,648 | ex | Elixir | web/web.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | web/web.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | web/web.ex | openrowing/raceman2 | eee2d51c50bddf63b9c5b9b351424d4c056fa27d | [
"Apache-2.0"
] | null | null | null | defmodule Web.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use Web.Web, :controller
use Web.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
end
end
def controller do
quote do
use Phoenix.Controller
alias Web.Repo
import Ecto
import Ecto.Query, only: [from: 1, from: 2]
import Web.Router.Helpers
import Web.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import Web.Router.Helpers
import Web.ErrorHelpers
import Web.Gettext
end
end
def router do
quote do
use Phoenix.Router
end
end
def channel do
quote do
use Phoenix.Channel
alias Web.Repo
import Ecto
import Ecto.Query, only: [from: 1, from: 2]
import Web.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 20.097561 | 88 | 0.651092 |
1c91811565727ad49e27f18ae81b681df968ba6f | 8,604 | ex | Elixir | lib/davis/stream.ex | jquadrino/davis | 28b405725855303b7e78ed5db22ded223dfb2dc8 | [
"Apache-2.0"
] | 1 | 2019-08-01T22:51:17.000Z | 2019-08-01T22:51:17.000Z | lib/davis/stream.ex | jquadrino/davis | 28b405725855303b7e78ed5db22ded223dfb2dc8 | [
"Apache-2.0"
] | null | null | null | lib/davis/stream.ex | jquadrino/davis | 28b405725855303b7e78ed5db22ded223dfb2dc8 | [
"Apache-2.0"
] | null | null | null | defmodule Davis.Stream do
@moduledoc false
import Davis.Frame.Flag, only: :macros
alias Davis.Headers
alias Davis.Frame
alias Davis.Stream.FSM
defstruct id: nil, state: :idle, window_size: nil, content_length: nil, buffered_content_length: 0
def log(msg) do
:error_logger.format('~p', [msg])
end
# Header Continuation validation
def event(frame_type, _, _, _, %{cont_stream_id: cont_stream_id} = conn_state)
when not is_nil(cont_stream_id) and not frame_type == :continutation do
%{conn_state|error: {:connection_error, :protocol_error}}
end
def event(_, %{id: id}, _, _, %{cont_stream_id: cont_stream_id} = conn_state)
when not is_nil(cont_stream_id) and cont_stream_id != id do
%{conn_state|error: {:connection_error, :protocol_error}}
end
# New Stream
def event(_, {:new_stream, _}, _, _, %{settings: %{max_concurrent_streams: max_concurrent_streams},
concurrent_count: concurrent_count} = conn_state)
when concurrent_count == max_concurrent_streams do
%{conn_state|error: {:connection_error, :protocol_error}}
end
def event(_, {:new_stream, 0}, _, _, conn_state) do
%{conn_state|error: {:connection_error, :protocol_error}}
end
# Headers
def event(:headers, {:new_stream, stream_id}, _, _, %{last_stream_id: last_stream_id} = conn_state)
when stream_id < last_stream_id do
%{conn_state|error: {:connection_error, :protocol_error}}
end
def event(:headers, {:new_stream, stream_id}, flags, payload, conn_state) do
stream = %__MODULE__{id: stream_id, window_size: conn_state.settings.initial_window_size}
conn_state = %{conn_state|last_stream_id: stream_id}
event(:headers, stream, flags, payload, conn_state)
end
def event(_, {:new_stream, _}, _, _, conn_state) do
%{conn_state|error: {:connection_error, :protocol_error}}
end
def event(:headers, stream, flags, {priority, data}, conn_state) do
conn_state = event(:priority, stream, flags, priority, conn_state)
event(:headers, stream, flags, data, conn_state)
end
def event(:headers, stream, flags, data, %{decode_context: decode_context,
cont_stream_id: cont_stream_id} = conn_state) do
new_state = FSM.next_state(:headers, stream.state, flags)
conn_state = handle_concurrency_count(stream.state, new_state, conn_state)
case new_state do
{:cont, then_state} when is_nil(cont_stream_id) ->
streams = Map.put(conn_state.streams, stream.id, %{stream|state: new_state})
%{conn_state|streams: streams, cont_stream_id: stream.id, cont_buffer: data}
{:cont, then_state} ->
%{conn_state|error: {:connection_error, :protocol_error}}
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
_ ->
log(stream.id)
case Headers.decode(data, decode_context) do
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
{:ok, headers, decode_context} ->
log(stream.id)
content_length = Keyword.get(headers, :'content-length') |> Davis.Headers.parse_content_length
stream = %{stream|state: new_state, content_length: content_length}
streams = Map.put(conn_state.streams, stream.id, stream)
%{conn_state|streams: streams, decode_context: decode_context}
end
end
end
def event(:continuation, stream, flags, data, %{decode_context: decode_context,
cont_stream_id: cont_stream_id} = conn_state) do
new_state = FSM.next_state(:continuation, stream.state, flags)
conn_state = handle_concurrency_count(stream.state, new_state, conn_state)
id = stream.id
case new_state do
{:cont, then_state} when id == cont_stream_id ->
%{conn_state|cont_stream_id: stream.id, cont_buffer: conn_state.cont_buffer <> data}
{:cont, then_state} ->
%{conn_state|error: {:connection_error, :protocol_error}}
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
_ ->
data = conn_state.cont_buffer <> data
conn_state = %{conn_state|cont_stream_id: nil, cont_buffer: nil}
case Headers.decode(data, decode_context) do
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
{:ok, headers, decode_context} ->
content_length = Keyword.get(headers, :'content-length') |> Davis.Headers.parse_content_length
stream = %{stream|state: new_state, content_length: content_length}
streams = Map.put(conn_state.streams, stream.id, stream)
%{conn_state|streams: streams, decode_context: decode_context}
end
end
end
# Push promise
def event(:push_promise, stream, flags, {promised_id, data}, %{cont_stream_id: cont_stream_id} = conn_state) do
new_state = FSM.next_state(:headers, stream.state, flags)
conn_state = handle_concurrency_count(stream.state, new_state, conn_state)
case new_state do
{:cont, then_state} when is_nil(cont_stream_id) ->
%{conn_state|cont_stream_id: stream.id, cont_buffer: data}
{:cont, then_state} ->
%{conn_state|error: {:connection_error, :protocol_error}}
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
_ ->
streams = Map.put(conn_state.streams, stream.id, %{stream|state: new_state})
%{conn_state|streams: streams, cont_stream_id: stream.id, cont_buffer: data}
end
end
# Priority
def event(:priority, _, %{id: id}, {_, stream_dependency, _}, conn_state) when stream_dependency == id,
do: %{conn_state|error: {:connection_error, :protocol_error}}
def event(:priority, _, _, data, conn_state), do: conn_state
# Data
def event(:data, stream, flags, data, conn_state) do
new_state = FSM.next_state(:data, stream.state, flags)
conn_state = handle_concurrency_count(stream.state, new_state, conn_state)
%{buffered_content_length: buffered_content_length, content_length: content_length} = stream
# flow control byte size
case new_state do
s when s in [:open, :half_closed] and not is_nil(content_length) and buffered_content_length + byte_size(data) > content_length ->
%{conn_state|error: {:connection_error, :protocol_error}}
s when s in [:open, :half_closed] ->
stream = %{stream|state: :half_closed, buffered_content_length: buffered_content_length + byte_size(data)}
streams = %{conn_state.streams|stream.id => stream}
%{conn_state|streams: streams}
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
end
end
# Reset
def event(:rst_stream, stream, _, data, conn_state) do
streams = %{conn_state.streams|stream.id => %{stream|state: :closed}}
%{conn_state|streams: streams}
end
# Window update
def event(:window_update, _, _, 0, conn_state),
do: %{conn_state|error: {:connection_error, :flow_control_error}}
def event(:window_update, %{window_size: window_size}, _, size, conn_state)
when window_size+size > 2147483647,
do: %{conn_state|error: {:connection_error, :flow_control_error}}
def event(:window_update, :idle, _, size, conn_state),
do: %{conn_state|error: {:connection_error, :protocol_error}}
def event(:window_update, stream, flags, size, conn_state) do
new_state = FSM.next_state(:data, stream.state, flags)
case new_state do
{:error, error} ->
%{conn_state|error: {:connection_error, error}}
_ ->
streams = %{conn_state.streams|stream.id => %{stream|window_size: stream.window_size+size}}
%{conn_state|streams: streams}
end
end
# Concurrency count updates
def handle_concurrency_count(:open, :half_closed, conn_state), do: conn_state
def handle_concurrency_count(:open, _, %{concurrent_count: concurrent_count} = conn_state),
do: %{conn_state|concurrent_count: concurrent_count-1}
def handle_concurrency_count(:half_closed, _, %{concurrent_count: concurrent_count} = conn_state),
do: %{conn_state|concurrent_count: concurrent_count-1}
def handle_concurrency_count(_, :open, %{concurrent_count: concurrent_count} = conn_state),
do: %{conn_state|concurrent_count: concurrent_count+1}
def handle_concurrency_count(_, :half_closed, %{concurrent_count: concurrent_count} = conn_state),
do: %{conn_state|concurrent_count: concurrent_count+1}
def handle_concurrency_count(_, _, conn_state), do: conn_state
end
| 39.109091 | 136 | 0.681311 |
1c9187752fee2a601315b4b776f165c073d724a2 | 4,732 | exs | Elixir | test/printer/all_of_printer_test.exs | HenkPoley/json-schema-to-elm | 92230ac907d1eab27a0c8e4d969c5104f8e66383 | [
"MIT"
] | null | null | null | test/printer/all_of_printer_test.exs | HenkPoley/json-schema-to-elm | 92230ac907d1eab27a0c8e4d969c5104f8e66383 | [
"MIT"
] | null | null | null | test/printer/all_of_printer_test.exs | HenkPoley/json-schema-to-elm | 92230ac907d1eab27a0c8e4d969c5104f8e66383 | [
"MIT"
] | null | null | null | defmodule JS2ETest.Printer.AllOfPrinter do
use ExUnit.Case
require Logger
alias JS2E.Printer.AllOfPrinter
alias JsonSchema.Types.{
AllOfType,
EnumType,
ObjectType,
PrimitiveType,
SchemaDefinition,
TypeReference
}
test "print 'all of' type value" do
result =
all_of_type()
|> AllOfPrinter.print_type(schema_def(), %{}, module_name())
all_of_type_program = result.printed_schema
expected_all_of_type_program = """
type alias FancyCircle =
{ zero : Zero
, circle : Circle
}
"""
assert all_of_type_program == expected_all_of_type_program
end
test "print 'all of' decoder" do
result =
all_of_type()
|> AllOfPrinter.print_decoder(schema_def(), %{}, module_name())
expected_all_of_decoder_program = """
fancyCircleDecoder : Decoder FancyCircle
fancyCircleDecoder =
succeed FancyCircle
|> custom zeroDecoder
|> custom circleDecoder
"""
all_of_decoder_program = result.printed_schema
assert all_of_decoder_program == expected_all_of_decoder_program
end
test "print 'all of' encoder" do
result =
all_of_type()
|> AllOfPrinter.print_encoder(schema_def(), %{}, module_name())
expected_all_of_encoder_program = """
encodeFancyCircle : FancyCircle -> Value
encodeFancyCircle fancyCircle =
[]
|> encodeRequired "color" fancyCircle.zero.color encodeColor
|> encodeOptional "description" fancyCircle.zero.description Encode.string
|> encodeRequired "radius" fancyCircle.circle.radius Encode.float
|> Encode.object
"""
all_of_encoder_program = result.printed_schema
assert all_of_encoder_program == expected_all_of_encoder_program
end
test "print 'all of' fuzzer" do
result =
all_of_type()
|> AllOfPrinter.print_fuzzer(schema_def(), %{}, module_name())
expected_all_of_fuzzer_program = """
fancyCircleFuzzer : Fuzzer FancyCircle
fancyCircleFuzzer =
Fuzz.map2
FancyCircle
zeroFuzzer
circleFuzzer
encodeDecodeFancyCircleTest : Test
encodeDecodeFancyCircleTest =
fuzz fancyCircleFuzzer "can encode and decode FancyCircle object" <|
\\fancyCircle ->
fancyCircle
|> encodeFancyCircle
|> Decode.decodeValue fancyCircleDecoder
|> Expect.equal (Ok fancyCircle)
"""
all_of_fuzzer_program = result.printed_schema
assert all_of_fuzzer_program == expected_all_of_fuzzer_program
end
defp path, do: "#/definitions/fancyCircle"
def module_name, do: "Data"
def all_of_type do
%AllOfType{
name: "fancyCircle",
path: URI.parse(path()),
types: [
URI.parse(Path.join(path(), "allOf/0")),
URI.parse(Path.join(path(), "allOf/1"))
]
}
end
def schema_def do
%SchemaDefinition{
description: "'allOf' example schema",
id: URI.parse("http://example.com/all_of_example.json"),
title: "AllOfExample",
types: type_dict()
}
end
def type_dict do
%{
"#/definitions/fancyCircle/allOf/0" => %ObjectType{
name: "0",
path: URI.parse(Path.join(path(), "allOf/0")),
required: ["color"],
properties: %{
"color" => URI.parse(Path.join(path(), "allOf/0/properties/color")),
"description" => URI.parse(Path.join(path(), "allOf/0/properties/description"))
}
},
"#/definitions/fancyCircle/allOf/0/properties/color" => %TypeReference{
name: "color",
path: URI.parse("#/definitions/color")
},
"#/definitions/color" => %EnumType{
name: "color",
path: URI.parse("#/definitions/color"),
type: "string",
values: ["red", "yellow", "green"]
},
"#/definitions/fancyCircle/allOf/0/properties/description" => %PrimitiveType{
name: "description",
path: URI.parse(Path.join(path(), "allOf/0/properties/description")),
type: "string"
},
"#/definitions/fancyCircle/allOf/1" => %TypeReference{
name: "1",
path: URI.parse("#/definitions/circle")
},
"#/definitions/circle" => %ObjectType{
name: "circle",
path: URI.parse("#/definitions/circle"),
required: ["radius"],
properties: %{
"radius" => URI.parse("#/definitions/circle/properties/radius")
}
},
"#/definitions/circle/properties/radius" => %PrimitiveType{
name: "radius",
path: URI.parse("#/definitions/circle/properties/radius"),
type: "number"
}
}
end
end
| 28.166667 | 89 | 0.614328 |
1c9198b4944a758b36f4e838691ddd2ae02a5ec6 | 2,169 | ex | Elixir | clients/cloud_tasks/lib/google_api/cloud_tasks/v2/model/get_policy_options.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_tasks/lib/google_api/cloud_tasks/v2/model/get_policy_options.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_tasks/lib/google_api/cloud_tasks/v2/model/get_policy_options.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudTasks.V2.Model.GetPolicyOptions do
  @moduledoc """
  Encapsulates settings provided to GetIamPolicy.
  ## Attributes
  * `requestedPolicyVersion` (*type:* `integer()`, *default:* `nil`) - Optional. The maximum policy version that will be used to format the policy. Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests for policies with any conditional role bindings must specify version 3. Policies with no conditional role bindings may specify any valid value or leave the field unset. The policy in the response might use the policy version that you specified, or it might use a lower policy version. For example, if you specify version 3, but the policy has no conditional role bindings, the response uses version 1. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
  """
  # Auto-generated model. ModelBase presumably supplies the struct definition,
  # a decode/2 function, and the field/1 macro used below — TODO confirm.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :requestedPolicyVersion => integer() | nil
        }
  field(:requestedPolicyVersion)
end
# Poison decode support: hand the raw decoded map to the model's generated decoder.
defimpl Poison.Decoder, for: GoogleApi.CloudTasks.V2.Model.GetPolicyOptions do
  def decode(value, options),
    do: GoogleApi.CloudTasks.V2.Model.GetPolicyOptions.decode(value, options)
end
# Poison encode support: encoding is shared by all generated models via ModelBase.
defimpl Poison.Encoder, for: GoogleApi.CloudTasks.V2.Model.GetPolicyOptions do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 46.148936 | 805 | 0.766252 |
1c91998331d92d3cdeb0d431bf6a2245e5473e3b | 6,295 | exs | Elixir | test/queuetopia_test.exs | annatel/queuetopia | dd4be7390382c203821ab88388f0c31ba5538edf | [
"MIT"
] | 7 | 2020-08-06T21:58:13.000Z | 2021-08-07T18:32:44.000Z | test/queuetopia_test.exs | annatel/queuetopia | dd4be7390382c203821ab88388f0c31ba5538edf | [
"MIT"
] | 2 | 2020-10-22T11:53:45.000Z | 2021-06-22T05:45:27.000Z | test/queuetopia_test.exs | annatel/queuetopia | dd4be7390382c203821ab88388f0c31ba5538edf | [
"MIT"
] | 2 | 2020-11-04T00:23:43.000Z | 2021-04-30T07:25:01.000Z | defmodule QueuetopiaTest do
use Queuetopia.DataCase
alias Queuetopia.{TestQueuetopia, TestQueuetopia_2}
alias Queuetopia.Queue.Job
setup do
Application.put_env(:queuetopia, TestQueuetopia, disable?: false)
:ok
end
test "multiple instances can coexist" do
start_supervised!(TestQueuetopia)
start_supervised!(TestQueuetopia_2)
:sys.get_state(TestQueuetopia.Scheduler)
:sys.get_state(TestQueuetopia_2.Scheduler)
end
describe "start_link/1: poll_interval option" do
test "preseance to the param" do
Application.put_env(:queuetopia, TestQueuetopia, poll_interval: 3)
start_supervised!({TestQueuetopia, poll_interval: 4})
%{poll_interval: 4} = :sys.get_state(TestQueuetopia.Scheduler)
end
test "when there is no param, try to take the value from the config" do
Application.put_env(:queuetopia, TestQueuetopia, poll_interval: 3)
start_supervised!(TestQueuetopia)
%{poll_interval: 3} = :sys.get_state(TestQueuetopia.Scheduler)
end
test "when there is no param and no config, takes the default value" do
start_supervised!(TestQueuetopia)
%{poll_interval: 60_000} = :sys.get_state(TestQueuetopia.Scheduler)
end
end
test "disable? option" do
Application.put_env(:queuetopia, TestQueuetopia, disable?: true)
start_supervised!(TestQueuetopia)
assert is_nil(Process.whereis(TestQueuetopia.Scheduler))
end
describe "create_job/5" do
test "creates the job" do
jobs_params = Factory.params_for(:job)
opts = [
timeout: jobs_params.timeout,
max_backoff: jobs_params.max_backoff,
max_attempts: jobs_params.max_attempts
]
%{rows: [[sequence]], num_rows: 1} =
Ecto.Adapters.SQL.query!(Queuetopia.TestRepo, "SELECT sequence FROM queuetopia_sequences")
assert {:ok, %Job{} = job} =
TestQueuetopia.create_job(
jobs_params.queue,
jobs_params.action,
jobs_params.params,
jobs_params.scheduled_at,
opts
)
assert job.sequence == sequence + 1
assert job.scope == TestQueuetopia.scope()
assert job.queue == jobs_params.queue
assert job.performer == TestQueuetopia.performer()
assert job.action == jobs_params.action
assert job.params == jobs_params.params
assert not is_nil(job.scheduled_at)
assert job.timeout == jobs_params.timeout
assert job.max_backoff == jobs_params.max_backoff
assert job.max_attempts == jobs_params.max_attempts
end
test "when options are set" do
%{
queue: queue,
action: action,
params: params,
timeout: timeout,
max_backoff: max_backoff,
max_attempts: max_attempts
} = Factory.params_for(:job)
assert {:ok,
%Job{
queue: ^queue,
action: ^action,
params: ^params,
timeout: ^timeout,
max_backoff: ^max_backoff,
max_attempts: ^max_attempts
}} =
TestQueuetopia.create_job(queue, action, params, DateTime.utc_now(),
timeout: timeout,
max_backoff: max_backoff,
max_attempts: max_attempts
)
end
test "when timing options are not set, takes the default job timing options" do
timeout = Job.default_timeout()
max_backoff = Job.default_max_backoff()
max_attempts = Job.default_max_attempts()
%{queue: queue, action: action, params: params} = Factory.params_for(:job)
assert {:ok,
%Job{
timeout: ^timeout,
max_backoff: ^max_backoff,
max_attempts: ^max_attempts
}} = TestQueuetopia_2.create_job(queue, action, params)
end
test "a created job is immediatly tried if the queue is empty (no need to wait the poll_interval)" do
Application.put_env(:queuetopia, TestQueuetopia, poll_interval: 5_000)
start_supervised!(TestQueuetopia)
%{queue: queue, action: action, params: params} = Factory.params_for(:success_job)
assert {:ok, %Job{id: job_id}} = TestQueuetopia.create_job(queue, action, params)
assert_receive {^queue, ^job_id, :ok}, 1_000
:sys.get_state(TestQueuetopia.Scheduler)
end
end
test "create_job!/5 raises when params are not valid" do
assert_raise Ecto.InvalidChangesetError, fn ->
TestQueuetopia.create_job!("queue", "action", %{}, DateTime.utc_now(), timeout: -1)
end
end
test "list_jobs/1" do
%{id: id} = Factory.insert!(:job)
assert [%{id: ^id}] = TestQueuetopia.list_jobs()
end
test "paginate_jobs/1" do
%{id: id_1} = Factory.insert!(:job, sequence: 1)
%{id: id_2} = Factory.insert!(:job, sequence: 2)
assert %{data: [%{id: ^id_2}], total: 2} = TestQueuetopia.paginate_jobs(1, 1)
assert %{data: [%{id: ^id_1}], total: 2} = TestQueuetopia.paginate_jobs(1, 2)
assert %{data: [], total: 2} = TestQueuetopia.paginate_jobs(1, 3)
end
describe "handle_event/1" do
test "sends a poll message to the scheduler" do
Application.put_env(:queuetopia, TestQueuetopia, poll_interval: 5_000)
start_supervised!(TestQueuetopia)
scheduler_pid = Process.whereis(TestQueuetopia.Scheduler)
:sys.get_state(TestQueuetopia.Scheduler)
{:messages, messages} = Process.info(scheduler_pid, :messages)
assert length(messages) == 0
:sys.get_state(TestQueuetopia.Scheduler)
assert :ok = TestQueuetopia.handle_event(:new_incoming_job)
assert :ok = TestQueuetopia.handle_event(:new_incoming_job)
assert :ok = TestQueuetopia.handle_event(:new_incoming_job)
assert :ok = TestQueuetopia.handle_event(:new_incoming_job)
assert :ok = TestQueuetopia.handle_event(:new_incoming_job)
{:messages, messages} = Process.info(scheduler_pid, :messages)
assert length(messages) == 1
:sys.get_state(TestQueuetopia.Scheduler)
end
test "when the scheduler is down, returns an error tuple" do
assert {:error, "Queuetopia.TestQueuetopia is down"} ==
TestQueuetopia.handle_event(:new_incoming_job)
end
end
end
| 32.958115 | 105 | 0.654964 |
1c919a81d6f74024affe6a16448d8922e9b97f41 | 342 | exs | Elixir | test/acceptance/html/paragraphs_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html/paragraphs_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html/paragraphs_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | defmodule Acceptance.Html.ParagraphsTest do
use Support.AcceptanceTestCase
describe "Paragraphs" do
test "a para" do
markdown = "aaa\n\nbbb\n"
html = gen([{:p, "aaa"}, {:p, "bbb"}])
messages = []
assert as_html(markdown) == {:ok, html, messages}
end
end
end
# SPDX-License-Identifier: Apache-2.0
| 20.117647 | 55 | 0.616959 |
1c91b5cb2ff325986415beb4d38b3d5e1822b52a | 2,910 | exs | Elixir | test/tableau_test.exs | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
] | 3 | 2020-02-29T22:43:52.000Z | 2021-02-18T13:38:45.000Z | test/tableau_test.exs | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
] | null | null | null | test/tableau_test.exs | adolfont/tableau | 2912685c718f00291430acff46948c4d9b463130 | [
"MIT"
defmodule TableauTest do
  use ExUnit.Case
  doctest Tableau
  doctest Linear

  test "apply all linear rules to a list" do
    # Signed formulas on which each linear (non-branching) tableau rule
    # fires exactly once in a single pass.
    linear_example = [
      {:t, {:not, :a}},
      {:f, {:not, :a}},
      {:t, {:c, :and, :d}},
      {:f, {:e, :or, :g}},
      {:f, {:h, :implies, :i}}
    ]

    # One pass replaces each input formula with its expansion(s).
    expected_result = [f: :a, t: :a, t: :c, t: :d, f: :e, f: :g, t: :h, f: :i]

    assert Linear.apply_all_linear_once(linear_example) == expected_result
  end

  test "apply all linear rules recursively" do
    # Nested connectives need repeated passes until no linear rule applies.
    linear_example_2 = [
      {:t, {:not, {:not, :a}}},
      {:t, {:c, :and, {:d, :and, :g}}},
      {:f, {:u, :and, :u}}
    ]

    # Note: originals are kept and expansions are appended in pass order;
    # {:f, {:u, :and, :u}} stays unexpanded (it is not a linear rule).
    expected_result = [
      t: {:not, {:not, :a}},
      t: {:c, :and, {:d, :and, :g}},
      f: {:u, :and, :u},
      f: {:not, :a},
      t: :c,
      t: {:d, :and, :g},
      t: :a,
      t: :d,
      t: :g
    ]

    assert Linear.apply_all_linear_recursively(linear_example_2) == expected_result
  end

  # Proving the larger generated instances below can be slow, hence the
  # unlimited timeout.
  @tag timeout: :infinity
  test "Only for visual debugging" do
    # Closed tableau for a valid inference; the two sub-branches come from
    # expanding the branching formula {:t, {:a, :implies, :b}}, and each
    # branch ends with a contradictory signed pair (f: :a / t: :a and
    # f: :b / t: :b), so every branch — and the proof — is :closed.
    assert Tableau.prove([{:t, {:not, {:not, :a}}}, {:t, {:a, :implies, :b}}, {:f, :b}]) ==
             %Proof{
               branches: [
                 %Proof{
                   branches: [],
                   formulas: [
                     t: {:not, {:not, :a}},
                     t: {:a, :implies, :b},
                     f: :b,
                     f: {:not, :a},
                     t: :a,
                     f: :a
                   ],
                   status: :closed
                 },
                 %Proof{
                   branches: [],
                   formulas: [
                     t: {:not, {:not, :a}},
                     t: {:a, :implies, :b},
                     f: :b,
                     f: {:not, :a},
                     t: :a,
                     t: :b
                   ],
                   status: :closed
                 }
               ],
               formulas: [
                 t: {:not, {:not, :a}},
                 t: {:a, :implies, :b},
                 f: :b,
                 f: {:not, :a},
                 t: :a
               ],
               status: :closed
             }

    # NOTE(review): ProblemGenerator.php/1 presumably builds pigeonhole-
    # principle instances (valid, so the tableau closes) — confirm. Removing
    # any one formula from the set makes it satisfiable, so the proof opens.
    assert Tableau.prove(ProblemGenerator.php(3)).status == :closed

    php_3 = ProblemGenerator.php(3)
    assert Tableau.prove(php_3 -- [Enum.random(php_3)]).status == :open

    # IO.inspect(
    #   Printing.show_proof(Tableau.prove([{:t, {:a, :implies, :b}}, {:t, {:c, :implies, :d}}]))
    # )
    # assert Printing.show_proof(
    #          Tableau.prove([{:t, {:a, :implies, :b}}, {:t, {:c, :implies, :d}}])
    #        ) ==
    #          "oi"
    # assert Printing.show_proof(
    #          Tableau.prove([{:t, {:not, {:not, :a}}}, {:f, :a}, {:t, {:a, :implies, :b}}, {:f, :b}])
    #        ) == []
    # |> IO.inspect()
    # Tableau.prove([{:t, {:not, {:not, :b}}}, {:t, {:a, :implies, :b}}, {:f, :b}])
    # |> IO.inspect()
  end
end
| 27.45283 | 101 | 0.365979 |
1c91da41438bf4921490d17ff9593b8cfbe9214d | 2,605 | exs | Elixir | .credo.exs | olafura/accent | b081bd96a816f5ffd79a1b28cd5ba022d5eda803 | [
"BSD-3-Clause"
] | 1 | 2020-07-01T16:08:34.000Z | 2020-07-01T16:08:34.000Z | .credo.exs | queer/accent | 43a0ab8b43c99a8f8b3bde7020eb615c39037ad5 | [
"BSD-3-Clause"
] | null | null | null | .credo.exs | queer/accent | 43a0ab8b43c99a8f8b3bde7020eb615c39037ad5 | [
"BSD-3-Clause"
# Credo static-analysis configuration; `mix credo` runs in strict mode over
# the directories listed below with the check set (and custom limits) here.
%{
  configs: [
    %{
      name: "default",
      strict: true,
      files: %{
        included: ["lib/", "test/", "priv/"],
        excluded: []
      },
      checks: [
        # Consistency checks.
        {Credo.Check.Consistency.ExceptionNames},
        {Credo.Check.Consistency.LineEndings},
        {Credo.Check.Consistency.SpaceAroundOperators},
        {Credo.Check.Consistency.SpaceInParentheses},
        {Credo.Check.Consistency.TabsOrSpaces},
        # Design checks (alias/duplication thresholds tightened here).
        {Credo.Check.Design.AliasUsage, if_called_more_often_than: 2, if_nested_deeper_than: 1},
        {Credo.Check.Design.DuplicatedCode, mass_threshold: 70, excluded_macros: []},
        {Credo.Check.Design.TagTODO},
        {Credo.Check.Design.TagFIXME},
        # Readability checks; ModuleDoc is disabled, line length relaxed to 200.
        {Credo.Check.Readability.AliasOrder},
        {Credo.Check.Readability.FunctionNames},
        {Credo.Check.Readability.LargeNumbers},
        {Credo.Check.Readability.MaxLineLength, max_length: 200},
        {Credo.Check.Readability.ModuleAttributeNames},
        {Credo.Check.Readability.ModuleDoc, false},
        {Credo.Check.Readability.ModuleNames},
        {Credo.Check.Readability.ParenthesesInCondition},
        {Credo.Check.Readability.PredicateFunctionNames},
        {Credo.Check.Readability.TrailingBlankLine},
        {Credo.Check.Readability.TrailingWhiteSpace},
        {Credo.Check.Readability.VariableNames},
        # Refactoring-opportunity checks; PipeChainStart allows e.g. `from`.
        {Credo.Check.Refactor.ABCSize, max_size: 50},
        {Credo.Check.Refactor.CaseTrivialMatches},
        {Credo.Check.Refactor.CondStatements},
        {Credo.Check.Refactor.FunctionArity},
        {Credo.Check.Refactor.MatchInCondition},
        {Credo.Check.Refactor.PipeChainStart, excluded_argument_types: ~w(atom binary fn keyword)a, excluded_functions: ~w(from)},
        {Credo.Check.Refactor.CyclomaticComplexity},
        {Credo.Check.Refactor.MapInto, false},
        {Credo.Check.Refactor.NegatedConditionsInUnless},
        {Credo.Check.Refactor.NegatedConditionsWithElse},
        {Credo.Check.Refactor.Nesting},
        {Credo.Check.Refactor.UnlessWithElse},
        # Warning checks (leftover debug calls, no-op operations, ...).
        {Credo.Check.Warning.IExPry},
        {Credo.Check.Warning.IoInspect},
        {Credo.Check.Warning.OperationOnSameValues},
        {Credo.Check.Warning.BoolOperationOnSameValues},
        {Credo.Check.Warning.UnusedEnumOperation},
        {Credo.Check.Warning.UnusedKeywordOperation},
        {Credo.Check.Warning.UnusedListOperation},
        {Credo.Check.Warning.UnusedStringOperation},
        {Credo.Check.Warning.UnusedTupleOperation},
        {Credo.Check.Warning.OperationWithConstantResult},
        # Third-party check from the credo_envvar package.
        {CredoEnvvar.Check.Warning.EnvironmentVariablesAtCompileTime}
      ]
    }
  ]
}
| 44.152542 | 130 | 0.679846 |
1c9233ec18f973e1abe1ba070736f003dc4d1a10 | 688 | ex | Elixir | lib/server/pyserver.ex | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
] | 30 | 2017-04-17T09:52:59.000Z | 2022-03-28T09:55:05.000Z | lib/server/pyserver.ex | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
] | 1 | 2020-08-25T17:23:51.000Z | 2020-08-27T23:53:19.000Z | lib/server/pyserver.ex | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
defmodule Server.Pyserver do
  @moduledoc """
  Launches the bundled Python script (`priv/mat.py`) as an external OS
  process, passing it the port number it should listen on.
  """
  use GenServer

  # Port handed to the Python script as its single CLI argument; must match
  # whatever the Elixir-side client connects to.
  @pyport 9849
  # Script path, relative to this application's priv directory.
  @pyserver_location "mat.py"

  ## Client API

  @doc """
  Starts the server registered under `name`.
  """
  def start_link(name) do
    GenServer.start_link(__MODULE__, :ok, name: name)
  end

  ## Server Callbacks

  @impl true
  def init(:ok) do
    # Fix: the original bound `_pwd = File.cwd!()` and never used the result —
    # a dead filesystem call, removed here.
    python = get_python()

    # NOTE(review): the OS process is spawned unlinked and unsupervised, so it
    # is neither restarted nor cleaned up if this GenServer dies; consider a
    # Port (or a process-supervision library) for proper lifecycle management.
    spawn(fn ->
      script = Path.join([:code.priv_dir(:expyplot), @pyserver_location])
      System.cmd(python, [script, Integer.to_string(@pyport)])
    end)

    {:ok, %{}}
  end

  ## Helper Functions

  # Prefer an explicit `python3` executable; fall back to plain `python`.
  defp get_python do
    if System.find_executable("python3") do
      "python3"
    else
      "python"
    end
  end
end
| 19.111111 | 135 | 0.649709 |
1c9234fd8b3d43b493f03d6eca04fa32d450142f | 156 | ex | Elixir | lib/lazymaru/helper.ex | aforward-oss/lazymaru | e656a342e1dc2989b8e27ab399cb2f4c571aa502 | [
"BSD-3-Clause"
] | null | null | null | lib/lazymaru/helper.ex | aforward-oss/lazymaru | e656a342e1dc2989b8e27ab399cb2f4c571aa502 | [
"BSD-3-Clause"
] | null | null | null | lib/lazymaru/helper.ex | aforward-oss/lazymaru | e656a342e1dc2989b8e27ab399cb2f4c571aa502 | [
"BSD-3-Clause"
defmodule Lazymaru.Helper do
  @moduledoc """
  `use Lazymaru.Helper` pulls the response helpers and this module's own
  functions into the caller.
  """

  defmacro __using__(_opts) do
    quote do
      use Lazymaru.Helpers.Response
      import unquote(__MODULE__)
    end
  end
end
| 17.333333 | 35 | 0.705128 |
1c9252e173205f86482d12cf26681bded22c84c1 | 1,510 | ex | Elixir | apps/ae_channel_interface/lib/ae_channel_interface_web/views/error_helpers.ex | davidyuk/ae-channel-service | bfde1d3e6ca4fc6bcc4a387381d4838cd3f1ec42 | [
"0BSD"
] | 9 | 2019-04-24T17:21:18.000Z | 2021-07-19T16:41:33.000Z | apps/ae_channel_interface/lib/ae_channel_interface_web/views/error_helpers.ex | davidyuk/ae-channel-service | bfde1d3e6ca4fc6bcc4a387381d4838cd3f1ec42 | [
"0BSD"
] | 68 | 2019-08-06T09:25:13.000Z | 2021-03-10T11:04:41.000Z | apps/ae_channel_interface/lib/ae_channel_interface_web/views/error_helpers.ex | isabella232/ae-channel-service | bfde1d3e6ca4fc6bcc4a387381d4838cd3f1ec42 | [
"ISC"
defmodule AeChannelInterfaceWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates an inline `<span>` tag for every error attached to `field`
  on the given form.
  """
  def error_tag(form, field) do
    for error <- Keyword.get_values(form.errors, field) do
      content_tag(:span, translate_error(error), class: "help-block")
    end
  end

  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # Ecto error messages arrive dynamically as `{message, opts}`, so they
    # are translated at runtime through our gettext backend in the "errors"
    # domain (errors.po). When Ecto supplies a :count option, plural rules
    # must be applied, hence dngettext/6 instead of dgettext/4.
    count = opts[:count]

    if count do
      Gettext.dngettext(AeChannelInterfaceWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(AeChannelInterfaceWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 33.555556 | 87 | 0.677483 |
1c929041801514c7e2c2ba5a4d3d1a5168b3cb39 | 434 | exs | Elixir | .formatter.exs | samhamilton/ex_country_data | 1fd103d2d942e72ea4e94ff49d0b387084c351b6 | [
"MIT"
] | 5 | 2017-11-02T13:39:38.000Z | 2021-01-04T00:07:19.000Z | .formatter.exs | samhamilton/ex_country_data | 1fd103d2d942e72ea4e94ff49d0b387084c351b6 | [
"MIT"
] | 1 | 2021-02-04T04:36:02.000Z | 2021-02-04T04:36:02.000Z | .formatter.exs | samhamilton/ex_country_data | 1fd103d2d942e72ea4e94ff49d0b387084c351b6 | [
"MIT"
# Configuration for `mix format`.
[
  # Globs the formatter will check and rewrite.
  inputs: [
    "lib/**/*.{ex,exs}",
    "test/**/*.{ex,exs}",
    "mix.exs"
  ],
  # DSL macros whose argument lists should stay unparenthesised when
  # formatted (`:*` means any arity).
  locals_without_parens: [
    mount: 1,
    swagger: 1,
    helpers: 1,
    version: 1,
    plug: :*,
    plug_overridable: :*,
    requires: :*,
    optional: :*,
    group: :*,
    given: :*,
    mutually_exclusive: 1,
    exactly_one_of: 1,
    at_least_one_of: 1,
    all_or_none_of: 1,
    prefix: :*,
    rescue_from: :*,
    desc: :*
  ]
]
| 16.074074 | 26 | 0.488479 |
1c92abc0a3b0ae53f0f29cc80ee16a7e5e6a8072 | 28,162 | exs | Elixir | test/floki/html/generated/tokenizer/namedEntities_part11_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
] | 1,778 | 2015-01-07T14:12:31.000Z | 2022-03-29T22:42:48.000Z | test/floki/html/generated/tokenizer/namedEntities_part11_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
] | 279 | 2015-01-01T15:54:50.000Z | 2022-03-28T18:06:03.000Z | test/floki/html/generated/tokenizer/namedEntities_part11_test.exs | nathanl/floki | 042b3f60f4d9a6218ec85d558d13cc6dac30c587 | [
"MIT"
defmodule Floki.HTML.Generated.Tokenizer.NamedentitiesPart11Test do
  use ExUnit.Case, async: true

  # NOTE: This file was generated by "mix generate_tokenizer_tests namedEntities.test".
  # html5lib-tests rev: e52ff68cc7113a6ef3687747fa82691079bf9cc5
  #
  # NOTE(review): the 100 per-entity tests were byte-identical except for the
  # entity name, so they are now generated from the list below. Test names and
  # assertions are unchanged. If the generator mix task is re-run it will
  # overwrite this file — keep this refactor in sync with the generator.

  alias Floki.HTML.Tokenizer

  # Named entities that, when written without a trailing semi-colon, must be
  # emitted verbatim as character tokens (no entity substitution).
  @entities ~w(
    gneq gneqq gnsim gopf grave gscr gsim gsime gsiml gvertneqq gvnE
    hArr hairsp half hamilt hardcy harr harrcir harrw hbar hcirc hearts
    heartsuit hellip hercon hfr hksearow hkswarow hoarr homtht
    hookleftarrow hookrightarrow hopf horbar hscr hslash hstrok hybull
    hyphen ic icy iecy iff ifr ii iiiint iiint iinfin iiota ijlig imacr
    image imagline imagpart imath imof imped in incare infin infintie
    inodot int intcal integers intercal intlarhk intprod iocy iogon iopf
    iota iprod iscr isin isinE isindot isins isinsv isinv it itilde iukcy
    jcirc jcy jfr jmath jopf jscr jsercy jukcy kappa kappav kcedil kcy
    kfr kgreen khcy kjcy kopf
  )

  for entity <- @entities do
    # Re-bound each iteration so the test body below (compiled immediately by
    # the `test` macro) captures the current entity.
    @entity entity

    test "tokenize/1 Bad named entity: #{entity} without a semi-colon" do
      input = "&" <> @entity
      output = [["Character", input]]

      result =
        input
        |> Tokenizer.tokenize()
        |> TokenizerTestLoader.tokenization_result()

      assert result.tokens == output
    end
  end
end
| 23.293631 | 87 | 0.628862 |
1c92c5099c4dd66fa97d818b64e64dda15107157 | 1,298 | ex | Elixir | lib/server2server_web/endpoint.ex | iboard/phxserver2server | 8c8d31be87dabdf846469492525d7bf6a61bccdd | [
"MIT"
] | null | null | null | lib/server2server_web/endpoint.ex | iboard/phxserver2server | 8c8d31be87dabdf846469492525d7bf6a61bccdd | [
"MIT"
] | null | null | null | lib/server2server_web/endpoint.ex | iboard/phxserver2server | 8c8d31be87dabdf846469492525d7bf6a61bccdd | [
"MIT"
defmodule Server2serverWeb.Endpoint do
  @moduledoc """
  HTTP/WebSocket entry point for the `:server2server` application:
  mounts the user socket and defines the top-level plug pipeline.
  """

  use Phoenix.Endpoint, otp_app: :server2server

  # Channel transport over WebSocket only; long-polling fallback disabled.
  socket "/socket", Server2serverWeb.UserSocket,
    websocket: true,
    longpoll: false

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :server2server,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Logger

  # Parse urlencoded/multipart/JSON bodies for all content types.
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  plug Plug.Session,
    store: :cookie,
    key: "_server2server_key",
    signing_salt: "M1gh9IF6"

  plug Server2serverWeb.Router
end
| 27.617021 | 69 | 0.717257 |
1c92fee91fbf369c48afdd2457311f733d764d55 | 3,812 | ex | Elixir | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/relyingparty.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/relyingparty.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/identity_toolkit/lib/google_api/identity_toolkit/v3/model/relyingparty.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.IdentityToolkit.V3.Model.Relyingparty do
@moduledoc """
Request of getting a code for user confirmation (reset password, change email etc.)
## Attributes
- androidInstallApp (boolean()): whether or not to install the android app on the device where the link is opened Defaults to: `null`.
- androidMinimumVersion (String.t): minimum version of the app. if the version on the device is lower than this version then the user is taken to the play store to upgrade the app Defaults to: `null`.
- androidPackageName (String.t): android package name of the android app to handle the action code Defaults to: `null`.
- canHandleCodeInApp (boolean()): whether or not the app can handle the oob code without first going to web Defaults to: `null`.
- captchaResp (String.t): The recaptcha response from the user. Defaults to: `null`.
- challenge (String.t): The recaptcha challenge presented to the user. Defaults to: `null`.
- continueUrl (String.t): The url to continue to the Gitkit app Defaults to: `null`.
- email (String.t): The email of the user. Defaults to: `null`.
- iOSAppStoreId (String.t): iOS app store id to download the app if it's not already installed Defaults to: `null`.
- iOSBundleId (String.t): the iOS bundle id of iOS app to handle the action code Defaults to: `null`.
- idToken (String.t): The user's Gitkit login token for email change. Defaults to: `null`.
- kind (String.t): The fixed string \"identitytoolkit#relyingparty\". Defaults to: `null`.
- newEmail (String.t): The new email if the code is for email change. Defaults to: `null`.
- requestType (String.t): The request type. Defaults to: `null`.
- userIp (String.t): The IP address of the user. Defaults to: `null`.
"""
# Auto-generated model. ModelBase supplies the field/1 macro used below and
# the decode/encode helpers delegated to by the Poison protocol impls.
use GoogleApi.Gax.ModelBase
# Generated spec leaves every attribute untyped (any()) and optional.
@type t :: %__MODULE__{
:androidInstallApp => any(),
:androidMinimumVersion => any(),
:androidPackageName => any(),
:canHandleCodeInApp => any(),
:captchaResp => any(),
:challenge => any(),
:continueUrl => any(),
:email => any(),
:iOSAppStoreId => any(),
:iOSBundleId => any(),
:idToken => any(),
:kind => any(),
:newEmail => any(),
:requestType => any(),
:userIp => any()
}
# Each field/1 call registers the JSON attribute of the same name.
field(:androidInstallApp)
field(:androidMinimumVersion)
field(:androidPackageName)
field(:canHandleCodeInApp)
field(:captchaResp)
field(:challenge)
field(:continueUrl)
field(:email)
field(:iOSAppStoreId)
field(:iOSBundleId)
field(:idToken)
field(:kind)
field(:newEmail)
field(:requestType)
field(:userIp)
end
defimpl Poison.Decoder, for: GoogleApi.IdentityToolkit.V3.Model.Relyingparty do
  # Delegate JSON decoding to the generated model helpers.
  def decode(value, options),
    do: GoogleApi.IdentityToolkit.V3.Model.Relyingparty.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.IdentityToolkit.V3.Model.Relyingparty do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 42.355556 | 202 | 0.699895 |
1c93134f8de0e426f9b3589628f09075eea9a11b | 202 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/82.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/82.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/TrainingInstances/82.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
pnoun * pnoun cardinal * cm * nst;
conj * verb noun * verb_aux * nst;
cm * verb pn * verb_aux * nst;
quantifier * particle particle * SYM * adjective;
verb * pnoun cm * cm * nst;
| 25.25 | 50 | 0.643564 |
1c9317eda9b17f1b6fe79e9f01bdb078b884f332 | 2,343 | ex | Elixir | clients/game_services/lib/google_api/game_services/v1beta/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/game_services/lib/google_api/game_services/v1beta/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/game_services/lib/google_api/game_services/v1beta/model/location.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GameServices.V1beta.Model.Location do
@moduledoc """
A resource that represents Google Cloud Platform location.
## Attributes
*   `displayName` (*type:* `String.t`, *default:* `nil`) - The friendly name for this location, typically a nearby city name. For example, "Tokyo".
*   `labels` (*type:* `map()`, *default:* `nil`) - Cross-service attributes for the location. For example {"cloud.googleapis.com/region": "us-east1"}
*   `locationId` (*type:* `String.t`, *default:* `nil`) - The canonical id for this location. For example: `"us-east1"`.
*   `metadata` (*type:* `map()`, *default:* `nil`) - Service-specific metadata. For example the available capacity at the given location.
*   `name` (*type:* `String.t`, *default:* `nil`) - Resource name for the location, which may vary between implementations. For example: `"projects/example-project/locations/us-east1"`
"""
# Auto-generated model. ModelBase supplies the field/1,2 macro below and the
# decode/encode helpers delegated to by the Poison protocol impls.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:displayName => String.t() | nil,
:labels => map() | nil,
:locationId => String.t() | nil,
:metadata => map() | nil,
:name => String.t() | nil
}
field(:displayName)
field(:labels, type: :map)
field(:locationId)
field(:metadata, type: :map)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.GameServices.V1beta.Model.Location do
  # Delegate JSON decoding to the generated model helpers.
  def decode(value, options),
    do: GoogleApi.GameServices.V1beta.Model.Location.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.GameServices.V1beta.Model.Location do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.711864 | 186 | 0.697397 |
1c9339b57bf997d7d84165e76c2c62913aac81c5 | 4,428 | ex | Elixir | apps/core/lib/core/black_list_users/black_list_users.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/lib/core/black_list_users/black_list_users.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/lib/core/black_list_users/black_list_users.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.BlackListUsers do
@moduledoc false
use Core.Search, Application.get_env(:core, :repos)[:read_prm_repo]
import Ecto.{Query, Changeset}, warn: false
import Core.API.Helpers.Connection, only: [get_consumer_id: 1]
alias Core.BlackListUsers.BlackListUser
alias Core.BlackListUsers.Search
alias Core.Parties
alias Core.Parties.Party
alias Core.PRMRepo
alias Scrivener.Page
@mithril_api Application.get_env(:core, :api_resolvers)[:mithril]
@read_prm_repo Application.get_env(:core, :repos)[:read_prm_repo]
@fields_required [:tax_id]
@fields_optional [:is_active]
def list(params) do
with %Ecto.Changeset{valid?: true} = changeset <- changeset(%Search{}, params),
paging <- search(changeset, params, BlackListUser),
users <- paging.entries,
tax_ids = Enum.map(users, &Map.get(&1, :tax_id)) do
parties =
Party
|> where([p], p.tax_id in ^tax_ids)
|> @read_prm_repo.all()
|> Enum.group_by(&Map.get(&1, :tax_id))
users =
Enum.map(users, fn user ->
Map.put(user, :parties, Map.get(parties, user.tax_id))
end)
%Page{paging | entries: users}
end
end
def get_by_id!(id), do: @read_prm_repo.get!(BlackListUser, id)
def get_by(params), do: @read_prm_repo.get_by(BlackListUser, params)
def blacklisted?(tax_id) do
case get_by(%{tax_id: tax_id, is_active: true}) do
nil -> false
_ -> true
end
end
def create(headers, %{"tax_id" => tax_id}) do
user_id = get_consumer_id(headers)
case get_by(%{tax_id: tax_id, is_active: true}) do
nil ->
user_ids =
tax_id
|> Parties.get_user_ids_by_tax_id()
|> Enum.join(",")
%BlackListUser{}
|> changeset(%{"tax_id" => tax_id})
|> validate_users_blocked(user_ids)
|> remove_tokens_by_user_ids(user_ids, headers)
|> put_change(:inserted_by, user_id)
|> put_change(:updated_by, user_id)
|> PRMRepo.insert_and_log(user_id)
|> load_references()
_ ->
{:error, {:conflict, "This user is already in a black list"}}
end
end
def create(_user_id, params) do
changeset(%BlackListUser{}, params)
end
defp validate_users_blocked(changeset, user_ids) do
validate_change(changeset, :tax_id, fn :tax_id, _tax_id ->
users_amount = user_ids |> String.split(",") |> length()
%{"ids" => user_ids, "is_blocked" => true}
|> @mithril_api.search_user([])
|> check_blocked_users_amount(users_amount)
end)
end
defp check_blocked_users_amount({:ok, %{"data" => []}}, _users_amount) do
[users: "Not all users were blocked"]
end
defp check_blocked_users_amount({:ok, %{"data" => amount}}, users_amount) when length(amount) == users_amount do
[]
end
defp check_blocked_users_amount({:ok, %{"data" => _}}, _users_amount) do
[users: "Not all users were blocked"]
end
defp check_blocked_users_amount(_, _users_amount) do
[users: "Cannot fetch Mithril users"]
end
defp remove_tokens_by_user_ids(%Ecto.Changeset{valid?: true} = changeset, user_ids, headers) do
validate_change(changeset, :tax_id, fn :tax_id, _tax_id ->
case @mithril_api.delete_tokens_by_user_ids(user_ids, headers) do
{:ok, _} -> []
_ -> [user_tokens: "Cannot delete user tokens"]
end
end)
end
defp remove_tokens_by_user_ids(changeset, _user_ids, _headers) do
changeset
end
def deactivate(_updated_by, %BlackListUser{is_active: false}) do
{:error, {:conflict, "User is not in a black list"}}
end
def deactivate(updated_by, %BlackListUser{} = black_list_user) do
black_list_user
|> changeset(%{is_active: false, updated_by: updated_by})
|> PRMRepo.update_and_log(updated_by)
|> load_references()
end
def changeset(%Search{} = search, attrs) do
cast(search, attrs, Search.__schema__(:fields))
end
def changeset(%BlackListUser{} = black_list_user, attrs) do
black_list_user
|> cast(attrs, @fields_required ++ @fields_optional)
|> validate_required(@fields_required)
|> validate_format(:tax_id, ~r/^[0-9]{10}$/)
end
defp load_references({:ok, entity}) do
{:ok, load_references(entity)}
end
defp load_references(%BlackListUser{} = entity) do
@read_prm_repo.preload(entity, :parties)
end
defp load_references(err) do
err
end
end
| 28.753247 | 114 | 0.659666 |
1c9341e50484de535579286052c3b85c54a8d91f | 1,476 | exs | Elixir | spec/routemaster/middleware/logger_spec.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | 2 | 2017-09-20T08:33:20.000Z | 2018-08-17T11:57:09.000Z | spec/routemaster/middleware/logger_spec.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | 5 | 2017-09-25T21:48:48.000Z | 2018-02-26T09:06:01.000Z | spec/routemaster/middleware/logger_spec.exs | deliveroo/routemaster-client-ex | 059ee64a54bc65b731bce1f8474dc8d907eb05d0 | [
"MIT"
] | null | null | null | defmodule Routemaster.Middleware.LoggerSpec do
use ESpec, async: true
alias Routemaster.Middleware.Logger, as: MidLogger
before do
original_log_level = Logger.level
Logger.configure(level: :info)
{:shared, original_log_level: original_log_level}
end
finally do
Logger.configure(level: shared.original_log_level)
end
let :req_url, do: "https://localhost/hamsters/1"
let :name, do: "TestName"
let(:req_env) do
%Tesla.Env{
url: req_url(),
method: :get
}
end
let(:resp_env) do
%Tesla.Env{
url: req_url(),
status: 200,
method: :get
}
end
# The next element of the Tesla stack. It represents an HTTP request.
let :terminator do
{:fn, fn(_env) -> resp_env() end}
end
subject MidLogger.call(req_env(), [terminator()], [context: name()])
it "logs the outgoing requests" do
message = capture_log fn -> subject() end
regex = ~r{\[#{name()}\] GET #{req_url()} \-\> 200 \(\d+\.\d+ms\)}
expect message |> to(match regex)
end
context "if a Tesla exception is raised" do
let :terminator do
{:fn, fn(_env) -> raise Tesla.Error, "a test exception" end}
end
it "logs the exception message" do
message = capture_log fn ->
try do
subject()
rescue
_ -> nil
end
end
regex = ~r{\[#{name()}\] GET #{req_url()} \-\> a test exception}
expect message |> to(match regex)
end
end
end
| 21.705882 | 71 | 0.607046 |
1c934ae48335bfdb024c66d8eda70693cbd3de71 | 1,304 | exs | Elixir | hitb_umbrella/apps/hitb_web/config/dev.exs | wang12580/hitb | b004aad92660541bcbae5a2af933f48fa6b14cd7 | [
"MIT"
] | 1 | 2020-09-08T10:59:14.000Z | 2020-09-08T10:59:14.000Z | hitb_umbrella/apps/hitb_web/config/dev.exs | wang12580/hitb | b004aad92660541bcbae5a2af933f48fa6b14cd7 | [
"MIT"
] | null | null | null | hitb_umbrella/apps/hitb_web/config/dev.exs | wang12580/hitb | b004aad92660541bcbae5a2af933f48fa6b14cd7 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :hitb_web, HitbWeb.Endpoint,
# http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :hitb_web, HitbWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/hitb_web/views/.*(ex)$},
~r{lib/hitb_web/templates/.*(eex)$}
]
]
| 29.636364 | 170 | 0.690951 |
1c9383c650680c8b72aeb4ab262fe85d07760ee9 | 5,624 | ex | Elixir | lib/new_relic/distributed_trace.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | lib/new_relic/distributed_trace.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | lib/new_relic/distributed_trace.ex | alejandrodnm/elixir_agent | b2264d8f54244d136950cadd02f2533967a8e7cb | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.DistributedTrace do
@dt_header "newrelic"
@moduledoc false
alias NewRelic.DistributedTrace.{Context, Tracker}
alias NewRelic.Harvest.Collector.AgentRun
alias NewRelic.Transaction
def accept_distributed_trace_payload(:http, conn) do
case Plug.Conn.get_req_header(conn, @dt_header) do
[trace_payload | _] ->
trace_payload
|> Context.decode()
[] ->
:no_payload
end
end
def create_distributed_trace_payload(:http) do
case get_tracing_context() do
nil -> []
context -> [{@dt_header, Context.encode(context, get_current_span_guid())}]
end
end
def generate_new_context() do
{priority, sampled} = generate_sampling()
%Context{
account_id: AgentRun.account_id(),
app_id: AgentRun.primary_application_id(),
trust_key: AgentRun.trusted_account_key(),
priority: priority,
sampled: sampled
}
end
def track_transaction(context, transport_type: type) do
context
|> assign_transaction_guid()
|> report_attributes(transport_type: type)
|> convert_to_outbound()
|> set_tracing_context()
end
def report_attributes(
%Context{parent_id: nil} = context,
transport_type: _type
) do
[
guid: context.guid,
traceId: context.guid,
priority: context.priority,
sampled: context.sampled
]
|> NewRelic.add_attributes()
context
end
def report_attributes(context, transport_type: type) do
[
"parent.type": context.type,
"parent.app": context.app_id,
"parent.account": context.account_id,
"parent.transportType": type,
"parent.transportDuration": transport_duration(context.timestamp),
parentId: context.parent_id,
parentSpanId: context.span_guid,
guid: context.guid,
traceId: context.trace_id,
priority: context.priority,
sampled: context.sampled
]
|> NewRelic.add_attributes()
context
end
def convert_to_outbound(%Context{parent_id: nil} = context) do
%Context{
account_id: AgentRun.account_id(),
app_id: AgentRun.primary_application_id(),
parent_id: nil,
trust_key: context.trust_key,
guid: context.guid,
trace_id: context.guid,
priority: context.priority,
sampled: context.sampled
}
end
def convert_to_outbound(%Context{} = context) do
%Context{
account_id: AgentRun.account_id(),
app_id: AgentRun.primary_application_id(),
parent_id: context.guid,
trust_key: context.trust_key,
guid: context.guid,
trace_id: context.trace_id,
priority: context.priority,
sampled: context.sampled
}
end
def set_tracing_context(context) do
Tracker.store(self(), context: context)
end
def cleanup_context() do
Tracker.cleanup(self())
end
def get_tracing_context() do
if Transaction.Reporter.tracking?(self()) do
self()
|> Transaction.Reporter.root()
|> Tracker.fetch()
end
end
def set_span(:generic, attrs) do
Process.put(:nr_current_span_attrs, Enum.into(attrs, %{}))
end
def set_span(:http, url: url, method: method, component: component) do
Process.put(:nr_current_span_attrs, %{url: url, method: method, component: component})
end
def set_span(
:datastore,
statement: statement,
instance: instance,
address: address,
hostname: hostname,
component: component
) do
Process.put(:nr_current_span_attrs, %{
statement: statement,
instance: instance,
address: address,
hostname: hostname,
component: component
})
end
def get_span_attrs() do
Process.get(:nr_current_span_attrs) || %{}
end
def set_current_span(label: label, ref: ref) do
current = {label, ref}
previous_span = Process.get(:nr_current_span)
previous_span_attrs = Process.get(:nr_current_span_attrs)
Process.put(:nr_current_span, current)
{current, previous_span, previous_span_attrs}
end
def get_current_span_guid() do
case Process.get(:nr_current_span) do
nil -> generate_guid(pid: self())
{label, ref} -> generate_guid(pid: self(), label: label, ref: ref)
end
end
def reset_span(previous_span: previous_span, previous_span_attrs: previous_span_attrs) do
Process.put(:nr_current_span, previous_span)
Process.put(:nr_current_span_attrs, previous_span_attrs)
end
defp generate_sampling() do
case {generate_sample?(), generate_priority()} do
{true, priority} -> {priority + 1, true}
{false, priority} -> {priority, false}
end
end
defp generate_sample?() do
NewRelic.DistributedTrace.BackoffSampler.sample?()
end
defp generate_priority, do: :rand.uniform() |> Float.round(6)
def assign_transaction_guid(context) do
Map.put(context, :guid, generate_guid())
end
def generate_guid(), do: :crypto.strong_rand_bytes(8) |> Base.encode16() |> String.downcase()
def generate_guid(pid: pid), do: encode_guid([pid, node()])
def generate_guid(pid: pid, label: label, ref: ref), do: encode_guid([label, ref, pid, node()])
def encode_guid(segments) when is_list(segments) do
segments
|> Enum.map(&encode_guid/1)
|> Enum.join("")
|> String.pad_trailing(16, "0")
end
def encode_guid(term) do
term
|> :erlang.phash2()
|> Integer.to_charlist(16)
|> to_string()
|> String.slice(0..4)
|> String.downcase()
end
defp transport_duration(context_start_time) do
(System.system_time(:millisecond) - context_start_time) / 1_000
end
end
| 26.280374 | 97 | 0.67123 |
1c938cf604a02dd359b890240a216f35053ab788 | 8,763 | ex | Elixir | lib/bsv/tx_builder.ex | libitx/bsv-ex | 95070d645fd61f9ea6ba8758e2e1b2367d0f9320 | [
"Apache-2.0"
] | 26 | 2019-08-31T13:39:50.000Z | 2022-01-18T10:23:26.000Z | lib/bsv/tx_builder.ex | libitx/bsv-ex | 95070d645fd61f9ea6ba8758e2e1b2367d0f9320 | [
"Apache-2.0"
] | 4 | 2020-04-24T18:24:01.000Z | 2021-06-15T10:59:33.000Z | lib/bsv/tx_builder.ex | libitx/bsv-ex | 95070d645fd61f9ea6ba8758e2e1b2367d0f9320 | [
"Apache-2.0"
] | 4 | 2020-05-21T15:27:31.000Z | 2021-06-13T05:26:47.000Z | defmodule BSV.TxBuilder do
@moduledoc """
A flexible and powerful transaction building module and API.
The TxBuilder accepts inputs and outputs that are modules implementing the
`BSV.Contract` behaviour. This abstraction makes for a succinct and elegant
approach to building transactions. The `BSV.Contract` behaviour is flexible
and can be used to define any kind of locking and unlocking script, not
limited to a handful of standard transactions.
## Examples
Because each input and output is prepared with all the information it needs,
calling `to_tx/1` is all that is needed to build and sign the transaction.
iex> utxo = UTXO.from_params!(%{
...> "txid" => "5e3014372338f079f005eedc85359e4d96b8440e7dbeb8c35c4182e0c19a1a12",
...> "vout" => 0,
...> "satoshis" => 11000,
...> "script" => "76a914538fd179c8be0f289c730e33b5f6a3541be9668f88ac"
...> })
iex>
iex> builder = %TxBuilder{
...> inputs: [
...> P2PKH.unlock(utxo, %{keypair: @keypair})
...> ],
...> outputs: [
...> P2PKH.lock(10000, %{address: @address}),
...> OpReturn.lock(0, %{data: ["hello", "world"]})
...> ]
...> }
iex>
iex> tx = TxBuilder.to_tx(builder)
iex> Tx.to_binary(tx, encoding: :hex)
"0100000001121a9ac1e082415cc3b8be7d0e44b8964d9e3585dcee05f079f038233714305e000000006a47304402200f674ba40b14b8f85b751ad854244a4199008c5b491b076df2eb6c3efd0be4bf022004b48ef0e656ee1873d07cb3b06858970de702f63935df2fbe8816f1a5f15e1e412103f81f8c8b90f5ec06ee4245eab166e8af903fc73a6dd73636687ef027870abe39ffffffff0210270000000000001976a914538fd179c8be0f289c730e33b5f6a3541be9668f88ac00000000000000000e006a0568656c6c6f05776f726c6400000000"
"""
alias BSV.{Address, Contract, Script, Tx, TxIn, TxOut, UTXO, VarInt}
alias BSV.Contract.P2PKH
import BSV.Util, only: [reverse_bin: 1]
# Fee rates in satoshis per byte, split into mining and relay rates.
@default_rates %{
mine: %{ data: 0.5, standard: 0.5 },
relay: %{ data: 0.25, standard: 0.25 }
}
@default_opts %{
rates: @default_rates,
sort: false
}
defstruct inputs: [],
outputs: [],
change_script: nil,
lock_time: 0,
options: @default_opts
@typedoc "TxBuilder struct"
@type t() :: %__MODULE__{
inputs: list(Contract.t()),
outputs: list(Contract.t()),
change_script: Script.t() | nil,
lock_time: non_neg_integer(),
options: map()
}
@typedoc """
Fee quote
A fee quote is a data structure representing miner fees. It can be either a
single number representing satoshis per bytes, or a map with keys for both
`:data` and `:standard` miner rates.
"""
@type fee_quote() :: %{
mine: %{
data: number(),
standard: number()
},
relay: %{
data: number(),
standard: number()
},
} | %{
data: number(),
standard: number()
} | number()
@doc """
Adds the given unlocking script contract to the builder.
"""
@spec add_input(t(), Contract.t()) :: t()
def add_input(%__MODULE__{} = builder, %Contract{mfa: {_, :unlocking_script, _}} = input),
do: update_in(builder.inputs, & &1 ++ [input])
@doc """
Adds the given locking script contract to the builder.
"""
@spec add_output(t(), Contract.t()) :: t()
def add_output(%__MODULE__{} = builder, %Contract{mfa: {_, :locking_script, _}} = output),
do: update_in(builder.outputs, & &1 ++ [output])
@doc """
Calculates the required fee for the builder's transaction, optionally using
the given `t:fee_quote/0`.
When different `:data` and `:standard` rates are given, data outputs
(identified by locking scripts beginning with `OP_FALSE OP_RETURN`) are
calculated using the appropriate rate.
"""
@spec calc_required_fee(t(), fee_quote()) :: non_neg_integer()
def calc_required_fee(builder, rates \\ @default_rates)
# A single number applies the same rate to both data and standard bytes.
def calc_required_fee(%__MODULE__{} = builder, rates) when is_number(rates),
do: calc_required_fee(builder, %{data: rates, standard: rates})
# A full quote map is normalized down to its mining rates.
def calc_required_fee(%__MODULE__{} = builder, %{mine: rates}),
do: calc_required_fee(builder, rates)
def calc_required_fee(%__MODULE__{inputs: inputs, outputs: outputs}, %{data: _, standard: _} = rates) do
[
{:standard, 4 + 4}, # version & locktime
{:standard, length(inputs) |> VarInt.encode() |> byte_size()},
{:standard, length(outputs) |> VarInt.encode() |> byte_size()}
]
|> Kernel.++(Enum.map(inputs, & calc_script_fee(Contract.to_txin(&1))))
|> Kernel.++(Enum.map(outputs, & calc_script_fee(Contract.to_txout(&1))))
|> Enum.reduce(0, fn {type, bytes}, fee -> fee + ceil(rates[type] * bytes) end)
end
@doc """
Sets the change script on the builder as a P2PKH locking script to the given
address.
"""
@spec change_to(t(), Address.t() | Address.address_str()) :: t()
def change_to(%__MODULE__{} = builder, %Address{} = address) do
# Satoshi amount is 0 here; the actual change value is computed in to_tx/1.
script = P2PKH.lock(0, %{address: address})
|> Contract.to_script()
Map.put(builder, :change_script, script)
end
def change_to(%__MODULE__{} = builder, address) when is_binary(address),
do: change_to(builder, Address.from_string!(address))
@doc """
Returns the sum of all inputs defined in the builder.
"""
@spec input_sum(t()) :: integer()
def input_sum(%__MODULE__{inputs: inputs}) do
inputs
|> Enum.map(& &1.subject.txout.satoshis)
|> Enum.sum()
end
@doc """
Returns the sum of all outputs defined in the builder.
"""
@spec output_sum(t()) :: integer()
def output_sum(%__MODULE__{outputs: outputs}) do
outputs
|> Enum.map(& &1.subject)
|> Enum.sum()
end
@doc """
Sorts the TxBuilder inputs and outputs according to [BIP-69](https://github.com/bitcoin/bips/blob/master/bip-0069.mediawiki).
BIP-69 defines deterministic lexographical indexing of transaction inputs and
outputs.
"""
@spec sort(t()) :: t()
def sort(%__MODULE__{} = builder) do
builder
# Inputs sort by (reversed txid hash, vout).
|> Map.update!(:inputs, fn inputs ->
Enum.sort(inputs, fn %{subject: %UTXO{outpoint: a}}, %{subject: %UTXO{outpoint: b}} ->
{reverse_bin(a.hash), a.vout} < {reverse_bin(b.hash), b.vout}
end)
end)
# Outputs sort by (satoshis, serialized locking script).
|> Map.update!(:outputs, fn outputs ->
Enum.sort(outputs, fn a, b ->
script_a = Contract.to_script(a)
script_b = Contract.to_script(b)
{a.subject, Script.to_binary(script_a)} < {b.subject, Script.to_binary(script_b)}
end)
end)
end
@doc """
Builds and returns the signed transaction.
"""
@spec to_tx(t()) :: Tx.t()
def to_tx(%__MODULE__{inputs: inputs, outputs: outputs} = builder) do
builder = if builder.options.sort == true, do: sort(builder), else: builder
tx = struct(Tx, lock_time: builder.lock_time)
# First pass on populating inputs will zero out signatures
tx = Enum.reduce(inputs, tx, fn contract, tx ->
Tx.add_input(tx, Contract.to_txin(contract))
end)
# Create outputs
tx = Enum.reduce(outputs, tx, fn contract, tx ->
Tx.add_output(tx, Contract.to_txout(contract))
end)
# Append change if required
tx = case get_change_txout(builder) do
%TxOut{} = txout ->
Tx.add_output(tx, txout)
_ ->
tx
end
# Second pass on populating inputs with actual sigs
Enum.reduce(Enum.with_index(inputs), tx, fn {contract, vin}, tx ->
txin = contract
|> Contract.put_ctx({tx, vin})
|> Contract.to_txin()
update_in(tx.inputs, & List.replace_at(&1, vin, txin))
end)
end
# Returns change txout if script present and amount exceeds dust threshold
defp get_change_txout(%{change_script: %Script{} = script} = builder) do
change = input_sum(builder) - output_sum(builder)
fee = calc_required_fee(builder, builder.options.rates)
txout = %TxOut{script: script}
# The change output itself adds bytes, so its own fee is deducted too.
extra_fee = ceil(TxOut.get_size(txout) * builder.options.rates.mine.standard)
change = change - (fee + extra_fee)
if change >= dust_threshold(txout, builder.options.rates) do
Map.put(txout, :satoshis, change)
end
end
defp get_change_txout(_builder), do: nil
# Calculates the size of the given TxIn or TxOut
defp calc_script_fee(%TxIn{} = txin) do
{:standard, TxIn.get_size(txin)}
end
defp calc_script_fee(%TxOut{script: script} = txout) do
# OP_FALSE OP_RETURN outputs are billed at the :data rate.
case script.chunks do
[:OP_FALSE, :OP_RETURN | _chunks] ->
{:data, TxOut.get_size(txout)}
_ ->
{:standard, TxOut.get_size(txout)}
end
end
# Returns the dust threshold of the given txout
# See: https://github.com/bitcoin-sv/bitcoin-sv/blob/master/src/primitives/transaction.h#L188-L208
defp dust_threshold(%TxOut{} = txout, %{relay: rates}),
do: 3 * floor((TxOut.get_size(txout) + 148) * rates.standard)
end
| 33.833977 | 436 | 0.65788 |
1c940dacd73a756d18f90e2bbd77903f6edf0312 | 3,335 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/video_category_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/video_category_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/video_category_list_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.VideoCategoryListResponse do
@moduledoc """
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - Etag of this resource.
* `eventId` (*type:* `String.t`, *default:* `nil`) - Serialized EventId of the request which produced this response.
* `items` (*type:* `list(GoogleApi.YouTube.V3.Model.VideoCategory.t)`, *default:* `nil`) - A list of video categories that can be associated with YouTube videos. In this map, the video category ID is the map key, and its value is the corresponding videoCategory resource.
* `kind` (*type:* `String.t`, *default:* `youtube#videoCategoryListResponse`) - Identifies what kind of resource this is. Value: the fixed string "youtube#videoCategoryListResponse".
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set.
* `pageInfo` (*type:* `GoogleApi.YouTube.V3.Model.PageInfo.t`, *default:* `nil`) - General pagination information.
* `prevPageToken` (*type:* `String.t`, *default:* `nil`) - The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set.
* `tokenPagination` (*type:* `GoogleApi.YouTube.V3.Model.TokenPagination.t`, *default:* `nil`) -
* `visitorId` (*type:* `String.t`, *default:* `nil`) - The visitorId identifies the visitor.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t(),
:eventId => String.t(),
:items => list(GoogleApi.YouTube.V3.Model.VideoCategory.t()),
:kind => String.t(),
:nextPageToken => String.t(),
:pageInfo => GoogleApi.YouTube.V3.Model.PageInfo.t(),
:prevPageToken => String.t(),
:tokenPagination => GoogleApi.YouTube.V3.Model.TokenPagination.t(),
:visitorId => String.t()
}
field(:etag)
field(:eventId)
field(:items, as: GoogleApi.YouTube.V3.Model.VideoCategory, type: :list)
field(:kind)
field(:nextPageToken)
field(:pageInfo, as: GoogleApi.YouTube.V3.Model.PageInfo)
field(:prevPageToken)
field(:tokenPagination, as: GoogleApi.YouTube.V3.Model.TokenPagination)
field(:visitorId)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.VideoCategoryListResponse do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.VideoCategoryListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.VideoCategoryListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.971831 | 275 | 0.706447 |
1c9415bebc739d79835bfa0c4d8b3871a79c01b4 | 745 | ex | Elixir | lib/leather_web/models/plaid_item.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 67 | 2016-10-24T04:11:40.000Z | 2021-11-25T16:46:51.000Z | lib/leather_web/models/plaid_item.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 6 | 2017-08-17T21:43:50.000Z | 2021-11-03T13:13:49.000Z | lib/leather_web/models/plaid_item.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 7 | 2017-08-13T01:43:37.000Z | 2022-01-11T04:38:27.000Z | defmodule Leather.Plaid.Item do
@moduledoc "The Plaid.Item model for Leather. https://plaid.com/docs/api/#retrieve-item"
alias Leather.Plaid.Item
import Ecto.Changeset
use Ecto.Schema
schema "plaid_items" do
field(:institution_name, :string)
field(:plaid_access_token, :string)
field(:plaid_item_id, :string)
timestamps()
belongs_to(:user, Leather.User)
has_many(:plaid_accounts, Leather.Plaid.Account, foreign_key: :plaid_item_id)
end
@doc false
def changeset(%Item{} = plaid_item, attrs \\ :invalid) do
plaid_item
|> cast(attrs, [:institution_name, :plaid_access_token, :plaid_item_id])
|> validate_required([:institution_name, :plaid_access_token, :plaid_item_id, :user_id])
end
end
| 28.653846 | 92 | 0.726174 |
1c941a615f98f9c6934c5999b44a9207aaef024d | 3,451 | ex | Elixir | lib/unblock_me_solver/move.ex | aussiDavid/unblock_me_solver | 12d816b502f6f061444ed9ad6d62f6d9b1a039be | [
"MIT"
] | null | null | null | lib/unblock_me_solver/move.ex | aussiDavid/unblock_me_solver | 12d816b502f6f061444ed9ad6d62f6d9b1a039be | [
"MIT"
] | null | null | null | lib/unblock_me_solver/move.ex | aussiDavid/unblock_me_solver | 12d816b502f6f061444ed9ad6d62f6d9b1a039be | [
"MIT"
] | null | null | null | defmodule UnblockMeSolver.Move do
alias UnblockMeSolver.Move
@moduledoc false
@doc """
Determines if the problem is solvable
A problem is solvable when there are empty spaces to the right of the solution block ['A', 'A']
## Examples
iex> UnblockMeSolver.Move.solvable?([
...> ['B', 'B', nil, nil, nil],
...> ['C', 'C', nil, nil, nil],
...> ['A', 'A', nil, nil, nil],
...> ['D', 'D', nil, nil, nil],
...> ['E', 'E', nil, nil, nil],
...>])
true
"""
def solvable?(problem) do
problem
|> Move.extract_solution_row()
|> Enum.drop_while(fn x -> x != 'A' end)
|> Enum.filter(fn x -> x != 'A' end)
|> Enum.all?(fn x -> x == nil end)
end
@doc """
Determines if the problem is solved by checking if the solution block is on the right most column
## Examples
iex> UnblockMeSolver.Move.solved?([['A', 'A']], 'A')
true
iex> UnblockMeSolver.Move.solved?([['A', 'A', nil]], 'A')
false
"""
def solved?(problem, block) do
problem
|> Enum.find([], fn row -> Enum.any?(row, fn x -> x == block end) end)
|> Enum.reverse
|> Enum.at(0) == block
end
@doc """
Extracts the middle row from a problem
## Examples
iex> UnblockMeSolver.Move.extract_solution_row([
...> ['B', 'B', nil, nil, nil],
...> ['C', 'C', nil, nil, nil],
...> ['A', 'A', nil, nil, nil],
...> ['D', 'D', nil, nil, nil],
...> ['E', 'E', nil, nil, nil],
...>])
['A', 'A', nil, nil, nil]
"""
def extract_solution_row(problem) do
index = (length(problem) / 2) |> floor
case Enum.fetch(problem, index) do
{:ok, row} -> row
_ -> []
end
end
@doc """
Moves a block in the directation and returns a tuple
{blocked_block, updated_problem} of the block in the way
(nil if no block is in the way) and the updated problem
(assuming it was succesful)
## Examples
iex> UnblockMeSolver.Move.with_next([
...> ['C', 'C', nil],
...> ['A', 'A', nil],
...> ['D', 'D', nil],
...>], :right, 'A')
{nil, [
['C', 'C', nil],
[nil, 'A', 'A'],
['D', 'D', nil],
]}
iex> UnblockMeSolver.Move.with_next([
...> ['A', 'A', 'B'],
...> [nil, nil, 'B'],
...> [nil, nil, nil],
...>], :right, 'A')
{'B', [
['A', 'A', 'B'],
[nil, nil, 'B'],
[nil, nil, nil],
]}
"""
def with_next(problem, direction, block) do
case direction do
:right -> Move.Helper.right_with_next(problem, block)
:down -> Move.Helper.down_with_next(problem, block)
:up -> Move.Helper.up_with_next(problem, block)
:left -> Move.Helper.left_with_next(problem, block)
_ -> raise "Can not move in the direction #{direction}"
end
end
@doc """
Returns a the direction of the block in a problem
I.e. :horizontal, :vertical. nil if otherwise
## Examples
iex> UnblockMeSolver.Move.direction([[nil, 'A', 'A', nil]], 'A')
:horizontal
iex> UnblockMeSolver.Move.direction([[nil], ['A'], ['A'], [nil]], 'A')
:vertical
"""
def direction(problem, block) do
has_row? = fn row ->
Enum.count(row, fn x -> x == block end) > 1
end
cond do
Enum.any?(problem, has_row?) -> :horizontal
Enum.any?(Move.Helper.rotate_cw(problem), has_row?) -> :vertical
true -> nil
end
end
end
| 25.189781 | 99 | 0.522457 |
1c942df65ea333256709c89716e0b132ed9c4b74 | 6,458 | exs | Elixir | test/mint/integration_test.exs | tcrossland/mint | 9575e41eeffb641814abec3fcf4f9f2eefcbe732 | [
"Apache-2.0"
] | null | null | null | test/mint/integration_test.exs | tcrossland/mint | 9575e41eeffb641814abec3fcf4f9f2eefcbe732 | [
"Apache-2.0"
] | null | null | null | test/mint/integration_test.exs | tcrossland/mint | 9575e41eeffb641814abec3fcf4f9f2eefcbe732 | [
"Apache-2.0"
] | null | null | null | defmodule Mint.IntegrationTest do
use ExUnit.Case, async: true
import Mint.HTTP1.TestHelpers
alias Mint.{TransportError, HTTP}
describe "httpstat.us" do
@describetag :integration
@describetag skip: "Seems like httpstat.us is down"
test "SSL - select HTTP1" do
assert {:ok, conn} =
HTTP.connect(
:https,
"httpstat.us",
443
)
assert conn.__struct__ == Mint.HTTP1
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/200", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [
{:status, ^request, 200},
{:headers, ^request, _},
{:done, ^request}
] = responses
end
@tag :capture_log
test "SSL - fail to select HTTP2" do
assert {:error, %TransportError{reason: :protocol_not_negotiated}} =
HTTP.connect(:https, "httpstat.us", 443,
protocols: [:http2],
transport_opts: [reuse_sessions: false]
)
end
end
describe "nghttp2.org" do
@describetag :integration
test "SSL - select HTTP1" do
assert {:ok, conn} = HTTP.connect(:https, "nghttp2.org", 443, protocols: [:http1])
assert conn.__struct__ == Mint.HTTP1
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/httpbin/bytes/1", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [
{:status, ^request, 200},
{:headers, ^request, _},
{:data, ^request, <<_>>},
{:done, ^request}
] = responses
end
test "SSL - select HTTP2" do
assert {:ok, conn} = HTTP.connect(:https, "nghttp2.org", 443)
assert conn.__struct__ == Mint.HTTP2
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/httpbin/bytes/1", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [
{:status, ^request, 200},
{:headers, ^request, _},
{:data, ^request, <<_>>},
{:done, ^request}
] = responses
end
end
describe "ssl certificate verification" do
@describetag :integration
test "bad certificate - badssl.com" do
assert {:error, %TransportError{reason: reason}} =
HTTP.connect(
:https,
"untrusted-root.badssl.com",
443,
transport_opts: [log_alert: false, reuse_sessions: false]
)
# OTP 21.3 changes the format of SSL errors. Let's support both ways for now.
assert reason == {:tls_alert, 'unknown ca'} or
match?({:tls_alert, {:unknown_ca, _}}, reason)
assert {:ok, _conn} =
HTTP.connect(
:https,
"untrusted-root.badssl.com",
443,
transport_opts: [verify: :verify_none]
)
end
test "bad hostname - badssl.com" do
assert {:error, %TransportError{reason: reason}} =
HTTP.connect(
:https,
"wrong.host.badssl.com",
443,
transport_opts: [log_alert: false, reuse_sessions: false]
)
# OTP 21.3 changes the format of SSL errors. Let's support both ways for now.
assert reason == {:tls_alert, 'handshake failure'} or
match?({:tls_alert, {:handshake_failure, _}}, reason)
assert {:ok, _conn} =
HTTP.connect(
:https,
"wrong.host.badssl.com",
443,
transport_opts: [verify: :verify_none]
)
end
end
describe "proxy" do
@describetag :proxy
test "200 response - http://httpbin.org" do
assert {:ok, conn} =
HTTP.connect(:http, "httpbin.org", 80, proxy: {:http, "localhost", 8888, []})
assert conn.__struct__ == Mint.UnsafeProxy
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [status, headers | responses] = responses
assert {:status, ^request, 200} = status
assert {:headers, ^request, headers} = headers
assert is_list(headers)
assert merge_body(responses, request) =~ "httpbin"
end
test "200 response - https://httpbin.org" do
assert {:ok, conn} =
HTTP.connect(:https, "httpbin.org", 443, proxy: {:http, "localhost", 8888, []})
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [status, headers | responses] = responses
assert {:status, ^request, 200} = status
assert {:headers, ^request, headers} = headers
assert is_list(headers)
assert merge_body(responses, request) =~ "httpbin.org"
end
test "200 response with explicit http2 - https://http2.golang.org" do
assert {:ok, conn} =
HTTP.connect(:https, "http2.golang.org", 443,
proxy: {:http, "localhost", 8888, []},
protocols: [:http2]
)
assert conn.__struct__ == Mint.HTTP2
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/reqinfo", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [status, headers | responses] = responses
assert {:status, ^request, 200} = status
assert {:headers, ^request, headers} = headers
assert is_list(headers)
assert merge_body(responses, request) =~ "Protocol: HTTP/2.0"
end
test "200 response without explicit http2 - https://http2.golang.org" do
assert {:ok, conn} =
HTTP.connect(:https, "http2.golang.org", 443,
proxy: {:http, "localhost", 8888, []},
protocols: [:http1, :http2]
)
assert conn.__struct__ == Mint.HTTP2
assert {:ok, conn, request} = HTTP.request(conn, "GET", "/reqinfo", [], nil)
assert {:ok, _conn, responses} = receive_stream(conn)
assert [status, headers | responses] = responses
assert {:status, ^request, 200} = status
assert {:headers, ^request, headers} = headers
assert is_list(headers)
assert merge_body(responses, request) =~ "Protocol: HTTP/2.0"
end
end
end
| 33.46114 | 94 | 0.548622 |
1c943a57fcd28c0089d2dd7c3daf3dc54a1a0276 | 8,735 | exs | Elixir | config/releases.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | config/releases.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | config/releases.exs | NathanielScottStevens/hindsight | 0dda1a931cff85b62eb53d623cc59cdb970ec33a | [
"Apache-2.0"
] | null | null | null | import Config
#
# Both compilation and runtime configuration should be set here across all apps.
# Non-prod specific environment configuration should reside in an app's config
# file for that Mix environment. For example: apps/my_app/config/test.exs.
#
# Configuration accessing environment variables should ALWAYS set a default,
# as this configuration will ALWAYS be evaluated.
#
# Example:
#
# config :my_app, :some_key,
# abc: 123,
# foo: System.get_env("FOO", "bar:baz")
#
# Configures Elixir's Logger
config :logger,
level: :warn,
console: [
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase
secret_key_base =
System.get_env(
"SECRET_KEY_BASE",
"d2cgmPzW+bqVjs99FUeKJ0kOm0w8EZBvLS7UBM8EHi6uBKgW2oBAa9pR2KSu8Z2W"
)
presto_db = [
url: System.get_env("PRESTO_URL", "http://localhost:8080"),
catalog: "hive",
schema: "default"
]
kafka_endpoints =
System.get_env("KAFKA_ENDPOINTS", "localhost:9092")
|> String.split(",")
|> Enum.map(&String.trim/1)
|> Enum.map(fn entry -> String.split(entry, ":") end)
|> Enum.map(fn [host, port] -> {String.to_atom(host), String.to_integer(port)} end)
redix_args = [host: System.get_env("REDIS_HOST", "localhost")]
config :redix, :args, redix_args
# SERVICE_RECEIVE
config :service_receive, Receive.Application,
kafka_endpoints: kafka_endpoints,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "receive-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Receive.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:receive:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
config :service_receive, Receive.Writer,
app_name: "service_receive",
kafka_endpoints: kafka_endpoints
# SERVICE_GATHER
config :service_gather, Gather.Application,
kafka_endpoints: kafka_endpoints,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "gather-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Gather.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:gather:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
config :service_gather, Gather.Extraction, app_name: "service_gather"
config :service_gather, Gather.Writer,
app_name: "service_gather",
kafka_endpoints: kafka_endpoints
# SERVICE BROADCAST
config :service_broadcast, BroadcastWeb.Endpoint,
http: [:inet6, port: String.to_integer(System.get_env("PORT") || "4000")],
secret_key_base: secret_key_base,
render_errors: [view: BroadcastWeb.ErrorView, accepts: ~w(json)],
pubsub: [name: Broadcast.PubSub, adapter: Phoenix.PubSub.PG2],
server: true,
check_origin: false
config :service_broadcast, Broadcast.Application,
kafka_endpoints: kafka_endpoints,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "broadcast-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Broadcast.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:broadcast:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
config :service_broadcast, Broadcast.Stream.Broadway.Configuration,
endpoints: kafka_endpoints,
broadway_config: [
producer: [
stages: 1
],
processors: [
default: [
stages: 1
]
],
batchers: [
default: [
stages: 1,
batch_size: 1_000,
batch_timeout: 1_000
]
]
]
config :service_broadcast, Broadcast.Stream.Broadway, app_name: "service_broadcast"
# SERVICE PERSIST
bucket_region = [region: System.get_env("BUCKET_REGION", "local")]
object_storage =
[
host: System.get_env("BUCKET_HOST"),
scheme: System.get_env("BUCKET_SCHEME"),
port: System.get_env("BUCKET_PORT")
]
|> Enum.filter(fn {_, val} -> val end)
|> Keyword.merge(bucket_region)
config :ex_aws, bucket_region
config :ex_aws, :s3, object_storage
config :service_persist, Persist.Application,
kafka_endpoints: kafka_endpoints,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "persist-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Persist.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:persist:view"]
],
dispatcher: Brook.Dispatcher.Noop,
event_processing_timeout: 20_000
]
config :service_persist, Persist.TableManager.Presto, Keyword.put(presto_db, :user, "hindsight")
config :service_persist, Persist.DataStorage.S3,
s3_bucket: System.get_env("BUCKET_NAME", "kdp-cloud-storage"),
s3_path: "hive-s3"
config :service_persist, Persist.Load.Broadway.Configuration,
endpoints: kafka_endpoints,
broadway_config: [
producer: [
stages: 1
],
processors: [
default: [
stages: 100
]
],
batchers: [
default: [
stages: 2,
batch_size: 1_000,
batch_timeout: 2_000
]
]
]
config :service_persist, Persist.Load.Broadway, app_name: "service_persist"
# SERVICE ORCHESTRATE
config :service_orchestrate, Orchestrate.Application,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "orchestrate-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Orchestrate.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:orchestrate:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
# SERVICE ACQUIRE
config :service_acquire, AcquireWeb.Endpoint,
http: [:inet6, port: String.to_integer(System.get_env("PORT") || "4000")],
secret_key_base: secret_key_base,
render_errors: [view: AcquireWeb.ErrorView, accepts: ~w(json)],
pubsub: [name: Acquire.PubSub, adapter: Phoenix.PubSub.PG2],
server: true,
check_origin: false
config :service_acquire, Acquire.Application,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "acquire-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Acquire.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:acquire:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
config :service_acquire, Acquire.Db.Presto, presto: Keyword.put(presto_db, :user, "acquire")
# SERVICE DEFINE
config :service_define, DefineWeb.Endpoint,
http: [:inet6, port: String.to_integer(System.get_env("DEFINE_PORT") || "4005")],
secret_key_base: secret_key_base,
live_view: [
signing_salt: secret_key_base,
],
render_errors: [view: DefineWeb.ErrorView, accepts: ~w(json)],
pubsub: [name: Define.PubSub, adapter: Phoenix.PubSub.PG2],
server: true,
check_origin: false
config :define_broadcast, Broadcast.Application,
kafka_endpoints: kafka_endpoints,
brook: [
driver: [
module: Brook.Driver.Kafka,
init_arg: [
endpoints: kafka_endpoints,
topic: "event-stream",
group: "define-event-stream",
consumer_config: [
begin_offset: :earliest,
offset_reset_policy: :reset_to_earliest
]
]
],
handlers: [Define.Event.Handler],
storage: [
module: Brook.Storage.Redis,
init_arg: [redix_args: redix_args, namespace: "service:define:view"]
],
dispatcher: Brook.Dispatcher.Noop
]
| 27.211838 | 96 | 0.665484 |
1c9485688279cb5e5f0c73db579b6b7474a23dfc | 7,412 | exs | Elixir | test/scenic/component/input/dropdown_test.exs | fhunleth/scenic | 02fc61916ebe0cb01bf436832409226cad2d1f8b | [
"Apache-2.0"
] | null | null | null | test/scenic/component/input/dropdown_test.exs | fhunleth/scenic | 02fc61916ebe0cb01bf436832409226cad2d1f8b | [
"Apache-2.0"
] | null | null | null | test/scenic/component/input/dropdown_test.exs | fhunleth/scenic | 02fc61916ebe0cb01bf436832409226cad2d1f8b | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 2018-11-18.
# Copyright © 2018 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Component.Input.DropdownTest do
use ExUnit.Case, async: true
doctest Scenic
# alias Scenic.Component
alias Scenic.Graph
alias Scenic.Primitive
alias Scenic.ViewPort
alias Scenic.Component.Input.Dropdown
@items [{"a", 1}, {"b", 2}]
@initial_item 2
@data {@items, @initial_item}
@state %{
graph: Graph.build(),
selected_id: @initial_item,
theme: Primitive.Style.Theme.preset(:primary),
id: :test_id,
down: false,
hover_id: nil,
items: @items,
drop_time: 0,
rotate_caret: 0
}
@button_id :__dropbox_btn__
# ============================================================================
# info
test "info works" do
assert is_bitstring(Dropdown.info(:bad_data))
assert Dropdown.info(:bad_data) =~ ":bad_data"
end
# ============================================================================
# verify
test "verify passes valid data" do
assert Dropdown.verify(@data) == {:ok, @data}
end
test "verify fails invalid data" do
assert Dropdown.verify(:banana) == :invalid_data
# invalid item in list
data = {[{:a, 1}, {"b", 2}], 2}
assert Dropdown.verify(data) == :invalid_data
# selected is not in list
data = {[{"a", 1}, {"b", 2}], 3}
assert Dropdown.verify(data) == :invalid_data
end
# ============================================================================
# init
test "init works with simple data" do
{:ok, state} = Dropdown.init(@data, styles: %{}, id: :test_id)
%Graph{} = state.graph
assert state.selected_id == @initial_item
assert is_map(state.theme)
assert state.down == false
assert state.hover_id == nil
assert state.items == @items
assert state.id == :test_id
end
# ============================================================================
# handle_input - up
test "handle_input {:cursor_enter, _uid} - up" do
{:noreply, state} =
Dropdown.handle_input({:cursor_enter, 1}, %{id: 123}, %{@state | down: false})
assert state.hover_id == 123
end
test "handle_input {:cursor_exit, _uid} - up" do
{:noreply, state} =
Dropdown.handle_input({:cursor_exit, 1}, %{id: 123}, %{@state | down: false})
assert state.hover_id == nil
end
test "handle_input {:cursor_button, :press - up" do
context = %ViewPort.Context{viewport: self(), id: @button_id}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :press, nil, nil}}, context, %{
@state
| down: false
})
assert state.down == true
assert is_integer(state.drop_time) && state.drop_time > 0
# confirm the input was captured
assert_receive({:"$gen_cast", {:capture_input, ^context, [:cursor_button, :cursor_pos]}})
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# ============================================================================
# handle_input - down
test "handle_input {:cursor_enter, _uid} - down" do
context = %ViewPort.Context{viewport: self(), id: 1}
{:noreply, state} = Dropdown.handle_input({:cursor_enter, 1}, context, %{@state | down: true})
assert state.hover_id == 1
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
test "handle_input {:cursor_exit, _uid} - down" do
context = %ViewPort.Context{viewport: self(), id: 1}
{:noreply, state} = Dropdown.handle_input({:cursor_exit, 1}, context, %{@state | down: true})
assert state.hover_id == nil
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# mouse down outside menu
test "handle_input {:cursor_button, :press nil - down" do
context = %ViewPort.Context{viewport: self(), id: nil}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :press, nil, nil}}, context, %{
@state
| down: true
})
assert state.down == false
# confirm the input was released
assert_receive({:"$gen_cast", {:release_input, [:cursor_button, :cursor_pos]}})
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# mouse down inside button - slow
test "handle_input {:cursor_button, :press button - down - slow - should close" do
context = %ViewPort.Context{viewport: self(), id: @button_id}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :release, nil, nil}}, context, %{
@state
| down: true
})
assert state.down == false
# confirm the input was released
assert_receive({:"$gen_cast", {:release_input, [:cursor_button, :cursor_pos]}})
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# mouse down inside button - fast
test "handle_input {:cursor_button, :press button - down - fast - should stay down" do
context = %ViewPort.Context{viewport: self(), id: @button_id}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :release, nil, nil}}, context, %{
@state
| down: true,
drop_time: :os.system_time(:milli_seconds)
})
assert state.down == true
# confirm the input was not released
refute_receive({:"$gen_cast", {:release_input, [:cursor_button, :cursor_pos]}})
# confirm the graph was not pushed
refute_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# mouse released outside dropdown space
test "handle_input {:cursor_button, :release button - outside menu" do
context = %ViewPort.Context{viewport: self(), id: nil}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :release, nil, nil}}, context, %{
@state
| down: true,
drop_time: :os.system_time(:milli_seconds)
})
assert state.down == false
assert state.selected_id == @initial_item
# confirm the input was released
assert_receive({:"$gen_cast", {:release_input, [:cursor_button, :cursor_pos]}})
# confirm the value change was not sent
refute_receive({:"$gen_cast", {:event, {:value_changed, _, _}, _}})
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
# mouse released inside dropdown space
test "handle_input {:cursor_button, :release button - inside menu" do
self = self()
Process.put(:parent_pid, self)
context = %ViewPort.Context{viewport: self, id: 1}
{:noreply, state} =
Dropdown.handle_input({:cursor_button, {:left, :release, nil, nil}}, context, %{
@state
| down: true
})
assert state.down == false
assert state.selected_id == 1
# confirm the input was released
assert_receive({:"$gen_cast", {:release_input, [:cursor_button, :cursor_pos]}})
# confirm the value change was not sent
assert_receive({:"$gen_cast", {:event, {:value_changed, :test_id, 1}, ^self}})
# confirm the graph was pushed
assert_receive({:"$gen_cast", {:push_graph, _, _, _}})
end
test "handle_input does nothing on unknown input" do
context = %ViewPort.Context{viewport: self()}
{:noreply, state} = Dropdown.handle_input(:unknown, context, @state)
assert state == @state
end
end
| 29.767068 | 98 | 0.60483 |
1c94c07463d82cd9e33de5cf34df5ad33058d26c | 258 | ex | Elixir | lib/membrane/element/base/sink.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/base/sink.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | lib/membrane/element/base/sink.ex | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Element.Base.Sink do
@moduledoc """
This module has been deprecated in favour of `Membrane.Sink`.
"""
@deprecated "Use `Membrane.Sink` instead"
defmacro __using__(_opts) do
quote do
use Membrane.Sink
end
end
end
| 19.846154 | 63 | 0.693798 |
1c94c9bb6c6c8893cd140935accf2bb5d33c6116 | 48,377 | exs | Elixir | lib/elixir/test/elixir/typespec_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | 1 | 2020-09-24T12:37:29.000Z | 2020-09-24T12:37:29.000Z | lib/elixir/test/elixir/typespec_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/typespec_test.exs | britto/elixir | 1f6e7093cff4b68dada60b924399bc8404d39a7e | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
# Holds tests for both Kernel.Typespec and Code.Typespec
defmodule TypespecTest do
  use ExUnit.Case, async: true
  alias TypespecTest.TypespecSample
  # Gives this test module its own one-field struct; presumably exercised by
  # struct-related typespec tests elsewhere in this file (TODO confirm) — it
  # makes %TypespecTest{hello: nil} available at compile time.
  defstruct [:hello]
  # Compiles `block` as the body of a throwaway `TypespecSample` module and
  # returns that module's compiled bytecode so tests can inspect its typespec
  # chunks. The module is deleted and purged immediately so definitions do not
  # leak between tests. Any error raised while compiling `block` propagates to
  # the caller, which is how the `assert_raise`-based tests below use it.
  defmacrop test_module(do: block) do
    quote do
      # `defmodule` returns {:module, name, bytecode, result}; we only keep
      # the bytecode.
      {:module, _, bytecode, _} =
        defmodule TypespecSample do
          unquote(block)
        end

      :code.delete(TypespecSample)
      :code.purge(TypespecSample)

      bytecode
    end
  end
defp types(bytecode) do
bytecode
|> Code.Typespec.fetch_types()
|> elem(1)
|> Enum.sort()
end
@skip_specs [__info__: 1]
defp specs(bytecode) do
bytecode
|> Code.Typespec.fetch_specs()
|> elem(1)
|> Enum.reject(fn {sign, _} -> sign in @skip_specs end)
|> Enum.sort()
end
defp callbacks(bytecode) do
bytecode
|> Code.Typespec.fetch_callbacks()
|> elem(1)
|> Enum.sort()
end
describe "Kernel.Typespec errors" do
    test "invalid type specification" do
      # Typespecs use `::`, not `=`; the compiler must reject the definition
      # and echo the offending expression in the message.
      assert_raise CompileError, ~r"invalid type specification: my_type = 1", fn ->
        test_module do
          @type my_type = 1
        end
      end
    end
    test "unexpected expression in typespec" do
      # A string literal is not a valid type; the error message echoes it.
      assert_raise CompileError, ~r"unexpected expression in typespec: \"foobar\"", fn ->
        test_module do
          @type my_type :: "foobar"
        end
      end
    end
    test "invalid function specification" do
      # A bare string is not a spec at all.
      assert_raise CompileError, ~r"invalid type specification: \"not a spec\"", fn ->
        test_module do
          @spec "not a spec"
        end
      end

      # A spec needs a function head on the left of `::`, not a literal.
      assert_raise CompileError, ~r"invalid type specification: 1 :: 2", fn ->
        test_module do
          @spec 1 :: 2
        end
      end
    end
    test "undefined type" do
      # Referencing a never-defined local type without arguments (foo/0)...
      assert_raise CompileError, ~r"type foo/0 undefined", fn ->
        test_module do
          @type omg :: foo
        end
      end

      # ...or with arguments (foo/2) is reported with the full name/arity.
      assert_raise CompileError, ~r"type foo/2 undefined", fn ->
        test_module do
          @type omg :: foo(atom, integer)
        end
      end

      # Undefined types are also caught when used inside a @spec.
      assert_raise CompileError, ~r"type bar/0 undefined", fn ->
        test_module do
          @spec foo(bar, integer) :: {atom, integer}
          def foo(var1, var2), do: {var1, var2}
        end
      end

      # Qualifying the type with __MODULE__ does not make it defined.
      assert_raise CompileError, ~r"type foo/0 undefined", fn ->
        test_module do
          @type omg :: __MODULE__.foo()
        end
      end
    end
    test "redefined type" do
      # Two @type definitions with the same name and arity clash.
      assert_raise CompileError, ~r"type foo/0 is already defined", fn ->
        test_module do
          @type foo :: atom
          @type foo :: integer
        end
      end

      # Different arities may coexist (foo/0 and foo/2), but repeating the
      # same arity is an error regardless of the parameter names used.
      assert_raise CompileError, ~r"type foo/2 is already defined", fn ->
        test_module do
          @type foo :: atom
          @type foo(var1, var2) :: {var1, var2}
          @type foo(x, y) :: {x, y}
        end
      end

      # Redefinition is rejected across visibilities too (@type vs @typep).
      assert_raise CompileError, ~r"type foo/0 is already defined", fn ->
        test_module do
          @type foo :: atom
          @typep foo :: integer
        end
      end
    end
    test "type variable unused (singleton type variable)" do
      # A type parameter that never appears in the definition body is an
      # error (singleton type variable).
      assert_raise CompileError, ~r"type variable x is unused", fn ->
        test_module do
          @type foo(x) :: integer
        end
      end
    end
    test "@type with a variable starting with underscore" do
      test_module do
        # Underscore-prefixed parameters may be unused without raising;
        # a successful @type definition returns :ok.
        assert @type(foo(_hello) :: integer) == :ok
      end
    end
    test "type variable _ should be invalid" do
      # A bare `_` cannot be used as a type parameter...
      assert_raise CompileError, ~r"type variable '_' is invalid", fn ->
        test_module do
          @type foo(_) :: integer
        end
      end

      # ...not even when it appears more than once.
      assert_raise CompileError, ~r"type variable '_' is invalid", fn ->
        test_module do
          @type foo(_, _) :: integer
        end
      end
    end
    # A @spec must refer to a function defined in the same module.
    test "spec for undefined function" do
      assert_raise CompileError, ~r"spec for undefined function omg/0", fn ->
        test_module do
          @spec omg :: atom
        end
      end
    end

    # A `when` variable that is bound but never used in the spec is an error,
    # mirroring the @type singleton-variable check above.
    test "spec variable unused (singleton type variable)" do
      assert_raise CompileError, ~r"type variable x is unused", fn ->
        test_module do
          @spec foo(x, integer) :: integer when x: var
          def foo(x, y), do: x + y
        end
      end
    end

    # @optional_callbacks takes a keyword list of name/arity pairs,
    # not a bare atom.
    test "ill defined optional callback" do
      assert_raise CompileError, ~r"invalid optional callback :foo", fn ->
        test_module do
          @optional_callbacks :foo
        end
      end
    end

    # Every optional callback must have a matching @callback/@macrocallback.
    test "unknown optional callback" do
      assert_raise CompileError, ~r"unknown callback foo/1 given as optional callback", fn ->
        test_module do
          @optional_callbacks foo: 1
        end
      end
    end

    # The same callback cannot be listed as optional twice.
    test "repeated optional callback" do
      message = ~r"foo/1 has been specified as optional callback more than once"

      assert_raise CompileError, message, fn ->
        test_module do
          @callback foo(:ok) :: :ok
          @optional_callbacks foo: 1, foo: 1
        end
      end
    end
    # @callback/@macrocallback generate behaviour_info/1, so a hand-written
    # behaviour_info/1 alongside them is a conflict.
    test "behaviour_info/1 explicitly defined alongside @callback/@macrocallback" do
      message = ~r"cannot define @callback attribute for foo/1 when behaviour_info/1"

      assert_raise CompileError, message, fn ->
        test_module do
          @callback foo(:ok) :: :ok
          def behaviour_info(_), do: []
        end
      end

      message = ~r"cannot define @macrocallback attribute for foo/1 when behaviour_info/1"

      assert_raise CompileError, message, fn ->
        test_module do
          @macrocallback foo(:ok) :: :ok
          def behaviour_info(_), do: []
        end
      end
    end

    # Default arguments (\\) are a function-definition construct and are
    # rejected in specs and callbacks, regardless of where the `::` sits.
    test "default is not supported" do
      assert_raise ArgumentError, fn ->
        test_module do
          @callback hello(num \\ 0 :: integer) :: integer
        end
      end

      assert_raise ArgumentError, fn ->
        test_module do
          @callback hello(num :: integer \\ 0) :: integer
        end
      end

      assert_raise ArgumentError, fn ->
        test_module do
          @macrocallback hello(num \\ 0 :: integer) :: Macro.t()
        end
      end

      assert_raise ArgumentError, fn ->
        test_module do
          @macrocallback hello(num :: integer \\ 0) :: Macro.t()
        end
      end

      assert_raise ArgumentError, fn ->
        test_module do
          @spec hello(num \\ 0 :: integer) :: integer
        end
      end

      assert_raise ArgumentError, fn ->
        test_module do
          @spec hello(num :: integer \\ 0) :: integer
        end
      end
    end

    # A @spec with arguments but no `:: return` gets a dedicated message.
    test "@spec shows readable error message when return type is missing" do
      message = ~r"type specification missing return type: my_fun\(integer\)"

      assert_raise CompileError, message, fn ->
        test_module do
          @spec my_fun(integer)
        end
      end
    end
end
describe "Kernel.Typespec definitions" do
    # All typespec attributes evaluate to :ok when defined.
    test "typespec declarations return :ok" do
      test_module do
        def foo(), do: nil

        assert @type(foo :: any()) == :ok
        assert @typep(foop :: any()) == :ok
        assert @spec(foo() :: nil) == :ok
        assert @opaque(my_type :: atom) == :ok
        assert @callback(foo(foop) :: integer) == :ok
        assert @macrocallback(foo(integer) :: integer) == :ok
      end
    end

    # The remaining tests assert the Erlang abstract-format terms stored in
    # the module's bytecode, as extracted by the types/1 helper.
    test "@type with a single type" do
      bytecode =
        test_module do
          @type my_type :: term
        end

      assert [type: {:my_type, {:type, _, :term, []}, []}] = types(bytecode)
    end

    test "@type with an atom" do
      bytecode =
        test_module do
          @type my_type :: :foo
        end

      assert [type: {:my_type, {:atom, _, :foo}, []}] = types(bytecode)
    end

    # Aliases are stored as plain atoms (the expanded module name).
    test "@type with an atom alias" do
      bytecode =
        test_module do
          @type my_type :: Atom
        end

      assert [type: {:my_type, {:atom, _, Atom}, []}] = types(bytecode)
    end

    test "@type with an integer" do
      bytecode =
        test_module do
          @type my_type :: 10
        end

      assert [type: {:my_type, {:integer, _, 10}, []}] = types(bytecode)
    end

    # Negative literals become a unary minus op node in the abstract format.
    test "@type with a negative integer" do
      bytecode =
        test_module do
          @type my_type :: -10
        end

      assert [type: {:my_type, {:op, _, :-, {:integer, _, 10}}, []}] = types(bytecode)
    end

    # Remote types become {:remote_type, _, [module, name, args]}.
    test "@type with a remote type" do
      bytecode =
        test_module do
          @type my_type :: Remote.Some.type()
          @type my_type_arg :: Remote.type(integer)
        end

      assert [type: my_type, type: my_type_arg] = types(bytecode)

      assert {:my_type, type, []} = my_type
      assert {:remote_type, _, [{:atom, _, Remote.Some}, {:atom, _, :type}, []]} = type

      assert {:my_type_arg, type, []} = my_type_arg
      assert {:remote_type, _, args} = type
      assert [{:atom, _, Remote}, {:atom, _, :type}, [{:type, _, :integer, []}]] = args
    end
    test "@type with a binary" do
      bytecode =
        test_module do
          @type my_type :: binary
        end

      assert [type: {:my_type, {:type, _, :binary, []}, []}] = types(bytecode)
    end

    # Binary types carry two integers: base size and unit size.
    # <<>> is size 0, unit 0.
    test "@type with an empty binary" do
      bytecode =
        test_module do
          @type my_type :: <<>>
        end

      assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 0}]}, []}] =
               types(bytecode)
    end

    # <<_::3>> fixes the base size, unit stays 0.
    test "@type with a binary with a base size" do
      bytecode =
        test_module do
          @type my_type :: <<_::3>>
        end

      assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 0}]}, []}] =
               types(bytecode)
    end

    # <<_::_*8>> fixes the unit size, base stays 0.
    test "@type with a binary with a unit size" do
      bytecode =
        test_module do
          @type my_type :: <<_::_*8>>
        end

      assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 8}]}, []}] =
               types(bytecode)
    end

    test "@type with a binary with a size and unit size" do
      bytecode =
        test_module do
          @type my_type :: <<_::3, _::_*8>>
        end

      assert [type: {:my_type, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 8}]}, []}] =
               types(bytecode)
    end

    # Malformed segment specs: size*unit in one segment, non-integer sizes,
    # and negative sizes/units are all rejected.
    test "@type with invalid binary spec" do
      assert_raise CompileError, ~r"invalid binary specification", fn ->
        test_module do
          @type my_type :: <<_::3*8>>
        end
      end

      assert_raise CompileError, ~r"invalid binary specification", fn ->
        test_module do
          @type my_type :: <<_::atom>>
        end
      end

      assert_raise CompileError, ~r"invalid binary specification", fn ->
        test_module do
          @type my_type :: <<_::(-4)>>
        end
      end

      assert_raise CompileError, ~r"invalid binary specification", fn ->
        test_module do
          @type my_type :: <<_::3, _::_*atom>>
        end
      end

      assert_raise CompileError, ~r"invalid binary specification", fn ->
        test_module do
          @type my_type :: <<_::3, _::_*(-8)>>
        end
      end
    end
    # Ranges become {:type, _, :range, [low, high]}; negative bounds use
    # the unary minus op node.
    test "@type with a range op" do
      bytecode =
        test_module do
          @type range1 :: 1..10
          @type range2 :: -1..1
        end

      assert [
               {:type, {:range1, {:type, _, :range, range1_args}, []}},
               {:type, {:range2, {:type, _, :range, range2_args}, []}}
             ] = types(bytecode)

      assert [{:integer, _, 1}, {:integer, _, 10}] = range1_args
      assert [{:op, _, :-, {:integer, _, 1}}, {:integer, _, 1}] = range2_args
    end

    # Range bounds must be integer literals.
    test "@type with invalid range" do
      assert_raise CompileError, ~r"invalid range specification", fn ->
        test_module do
          @type my_type :: atom..10
        end
      end
    end

    # Keyword-style map fields (hello: :world) are required fields
    # (:map_field_exact in the abstract format).
    test "@type with a keyword map" do
      bytecode =
        test_module do
          @type my_type :: %{hello: :world}
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :map, [arg]} = type
      assert {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]} = arg
    end

    # required(...) maps to :map_field_exact, optional(...) to :map_field_assoc.
    test "@type with a map" do
      bytecode =
        test_module do
          @type my_type :: %{required(:a) => :b, optional(:c) => :d}
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :map, [arg1, arg2]} = type
      assert {:type, _, :map_field_exact, [{:atom, _, :a}, {:atom, _, :b}]} = arg1
      assert {:type, _, :map_field_assoc, [{:atom, _, :c}, {:atom, _, :d}]} = arg2
    end

    # A struct type is a map with a :__struct__ field; fields not listed in
    # the type default to term().
    test "@type with a struct" do
      bytecode =
        test_module do
          defstruct hello: nil, other: nil
          @type my_type :: %TypespecSample{hello: :world}
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :map, [struct, arg1, arg2]} = type
      assert {:type, _, :map_field_exact, struct_args} = struct
      assert [{:atom, _, :__struct__}, {:atom, _, TypespecSample}] = struct_args
      assert {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]} = arg1
      assert {:type, _, :map_field_exact, [{:atom, _, :other}, {:type, _, :term, []}]} = arg2
    end
    # 33 fields (:f10 .. :f42), used to exercise struct types with many keys.
    @fields Enum.map(10..42, &{:"f#{&1}", :ok})

    test "@type with a large struct" do
      bytecode =
        test_module do
          defstruct unquote(@fields)
          @type my_type :: %TypespecSample{unquote_splicing(@fields)}
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :map, [struct, arg1, arg2 | _]} = type
      assert {:type, _, :map_field_exact, struct_args} = struct
      assert [{:atom, _, :__struct__}, {:atom, _, TypespecSample}] = struct_args

      # Only spot-check the first two fields; field order is preserved.
      assert {:type, _, :map_field_exact, [{:atom, _, :f10}, {:atom, _, :ok}]} = arg1
      assert {:type, _, :map_field_exact, [{:atom, _, :f11}, {:atom, _, :ok}]} = arg2
    end

    # @enforce_keys constrains struct construction at runtime, not the
    # fields that may appear in a struct type.
    test "@type with struct does not @enforce_keys" do
      bytecode =
        test_module do
          @enforce_keys [:other]
          defstruct hello: nil, other: nil
          @type my_type :: %TypespecSample{hello: :world}
        end

      assert [type: {:my_type, _type, []}] = types(bytecode)
    end
    test "@type with undefined struct" do
      # The module does not exist at all.
      assert_raise CompileError, ~r"ThisModuleDoesNotExist.__struct__/0 is undefined", fn ->
        test_module do
          @type my_type :: %ThisModuleDoesNotExist{}
        end
      end

      # The module exists but no defstruct was defined in it.
      assert_raise CompileError, ~r"cannot access struct TypespecTest.TypespecSample", fn ->
        test_module do
          @type my_type :: %TypespecSample{}
        end
      end
    end

    # Struct types may only mention fields declared by defstruct.
    test "@type with a struct with undefined field" do
      assert_raise CompileError, ~r"undefined field :no_field on struct TypespecSample", fn ->
        test_module do
          defstruct [:hello, :eric]
          @type my_type :: %TypespecSample{no_field: :world}
        end
      end
    end

    # Built-in type names cannot be redefined, whether the built-in comes
    # from Elixir (struct/0) or Erlang (list/0).
    test "@type when overriding Elixir built-in" do
      assert_raise CompileError, ~r"type struct/0 is a built-in type", fn ->
        test_module do
          @type struct :: :oops
        end
      end
    end

    test "@type when overriding Erlang built-in" do
      assert_raise CompileError, ~r"type list/0 is a built-in type", fn ->
        test_module do
          @type list :: :oops
        end
      end
    end
    # record(:timestamp, ...) expands to a tagged tuple type: the record tag
    # atom followed by one annotated type per field. Fields not given in the
    # type default to term().
    test "@type with public record" do
      bytecode =
        test_module do
          require Record
          Record.defrecord(:timestamp, date: 1, time: 2)
          @type my_type :: record(:timestamp, time: :foo)
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :tuple, [timestamp, term, foo]} = type
      assert {:atom, 0, :timestamp} = timestamp
      assert {:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]} = term
      assert {:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]} = foo
    end

    # Same expansion for records defined with defrecordp.
    test "@type with private record" do
      bytecode =
        test_module do
          require Record
          Record.defrecordp(:timestamp, date: 1, time: 2)
          @type my_type :: record(:timestamp, time: :foo)
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :tuple, args} = type

      assert [
               {:atom, 0, :timestamp},
               {:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]},
               {:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]}
             ] = args
    end
test "@type with named record" do
bytecode =
test_module do
require Record
Record.defrecord(:timestamp, :my_timestamp, date: 1, time: 2)
@type my_type :: record(:timestamp, time: :foo)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :tuple, [my_timestamp, term, _foo]} = type
assert {:atom, 0, :my_timestamp} = my_timestamp
assert {:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]} = term
assert {:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]}
end
    # record/2 requires a record previously defined in the module.
    test "@type with undefined record" do
      assert_raise CompileError, ~r"unknown record :this_record_does_not_exist", fn ->
        test_module do
          @type my_type :: record(:this_record_does_not_exist, [])
        end
      end
    end

    # Only declared record fields may appear in the type.
    test "@type with a record with undefined field" do
      assert_raise CompileError, ~r"undefined field no_field on record :timestamp", fn ->
        test_module do
          require Record
          Record.defrecord(:timestamp, date: 1, time: 2)
          @type my_type :: record(:timestamp, no_field: :foo)
        end
      end
    end

    # The first argument of record/2 must be an atom literal, not the
    # atom() type.
    test "@type with a record which declares the name as the type `atom` rather than an atom literal" do
      assert_raise CompileError, ~r"expected the record name to be an atom literal", fn ->
        test_module do
          @type my_type :: record(atom, field: :foo)
        end
      end
    end

    # `record` is not reserved: a user type may be named record/0 and used
    # in specs as usual.
    test "@type can be named record" do
      bytecode =
        test_module do
          @type record :: binary
          @spec foo?(record) :: boolean
          def foo?(_), do: true
        end

      assert [type: {:record, {:type, _, :binary, []}, []}] = types(bytecode)
    end

    # Union key types require explicit required/optional markers.
    test "@type with an invalid map notation" do
      assert_raise CompileError, ~r"invalid map specification", fn ->
        test_module do
          @type content :: %{atom | String.t() => term}
        end
      end
    end
    # [] is the empty-list type (nil in the abstract format), [t] is list(t),
    # and [t, ...] is nonempty_list(t).
    test "@type with list shortcuts" do
      bytecode =
        test_module do
          @type my_type :: []
          @type my_type1 :: [integer]
          @type my_type2 :: [integer, ...]
        end

      assert [
               type: {:my_type, {:type, _, nil, []}, []},
               type: {:my_type1, {:type, _, :list, [{:type, _, :integer, []}]}, []},
               type: {:my_type2, {:type, _, :nonempty_list, [{:type, _, :integer, []}]}, []}
             ] = types(bytecode)
    end

    # (... -> any) is the fully general fun type: {:type, _, :fun, []}.
    test "@type with a fun" do
      bytecode =
        test_module do
          @type my_type :: (... -> any)
        end

      assert [type: {:my_type, {:type, _, :fun, []}, []}] = types(bytecode)
    end

    # A concrete fun type carries a :product node for the arguments plus
    # the return type.
    test "@type with a fun with multiple arguments and return type" do
      bytecode =
        test_module do
          @type my_type :: (integer, integer -> integer)
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :fun, [args, return_type]} = type
      assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = args
      assert {:type, _, :integer, []} = return_type
    end

    test "@type with a fun with no arguments and return type" do
      bytecode =
        test_module do
          @type my_type :: (() -> integer)
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]} = type
    end

    # `...` arguments with a concrete return become {:type, _, :any} args.
    test "@type with a fun with any arity and return type" do
      bytecode =
        test_module do
          @type my_type :: (... -> integer)
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :fun, [{:type, _, :any}, {:type, _, :integer, []}]} = type
    end

    # Unions keep member order; Elixir-defined built-ins such as charlist
    # are stored as remote types on the :elixir module.
    test "@type with a union" do
      bytecode =
        test_module do
          @type my_type :: integer | charlist | atom
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :union, [integer, charlist, atom]} = type
      assert {:type, _, :integer, []} = integer
      assert {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]} = charlist
      assert {:type, _, :atom, []} = atom
    end
    # A keyword list type is a list of a union of 2-tuples.
    test "@type with keywords" do
      bytecode =
        test_module do
          @type my_type :: [first: integer, step: integer, last: integer]
        end

      assert [type: {:my_type, type, []}] = types(bytecode)
      assert {:type, _, :list, [{:type, _, :union, union_types}]} = type

      assert [
               {:type, _, :tuple, [{:atom, _, :first}, {:type, _, :integer, []}]},
               {:type, _, :tuple, [{:atom, _, :step}, {:type, _, :integer, []}]},
               {:type, _, :tuple, [{:atom, _, :last}, {:type, _, :integer, []}]}
             ] = union_types
    end

    # Parameterized types record their parameters as :var nodes in the
    # third element of the stored tuple.
    test "@type with parameters" do
      bytecode =
        test_module do
          @type my_type(x) :: x
          @type my_type1(x) :: list(x)
          @type my_type2(x, y) :: {x, y}
        end

      assert [
               type: {:my_type, {:var, _, :x}, [{:var, _, :x}]},
               type: {:my_type1, {:type, _, :list, [{:var, _, :x}]}, [{:var, _, :x}]},
               type: {:my_type2, my_type2, [{:var, _, :x}, {:var, _, :y}]}
             ] = types(bytecode)

      assert {:type, _, :tuple, [{:var, _, :x}, {:var, _, :y}]} = my_type2
    end

    # `name :: type` annotations become :ann_type nodes, both at the top
    # level and inside fun argument products.
    test "@type with annotations" do
      bytecode =
        test_module do
          @type my_type :: named :: integer
          @type my_type1 :: (a :: integer -> integer)
        end

      assert [type: {:my_type, my_type, []}, type: {:my_type1, my_type1, []}] = types(bytecode)

      assert {:ann_type, _, [{:var, _, :named}, {:type, _, :integer, []}]} = my_type
      assert {:type, _, :fun, [fun_args, fun_return]} = my_type1
      assert {:type, _, :product, [{:ann_type, _, [a, {:type, _, :integer, []}]}]} = fun_args
      assert {:var, _, :a} = a
      assert {:type, _, :integer, []} = fun_return
    end

    # @opaque is stored under the :opaque kind instead of :type.
    test "@opaque(type)" do
      bytecode =
        test_module do
          @opaque my_type(x) :: x
        end

      assert [opaque: {:my_type, {:var, _, :x}, [{:var, _, :x}]}] = types(bytecode)
    end
    # types/1 returns opaques before types.
    test "@type + opaque" do
      bytecode =
        test_module do
          @type my_type :: tuple
          @opaque my_type1 :: {}
        end

      assert [opaque: {:my_type1, _, []}, type: {:my_type, _, []}] = types(bytecode)
    end

    # Both the type name and its body may be unquote fragments evaluated
    # via Module.eval_quoted/2.
    test "@type unquote fragment" do
      quoted =
        quote unquote: false do
          name = :my_type
          type = :foo
          @type unquote(name)() :: unquote(type)
        end

      bytecode =
        test_module do
          Module.eval_quoted(__MODULE__, quoted)
        end

      assert [type: {:my_type, {:atom, _, :foo}, []}] = types(bytecode)
    end

    # A module attribute holding a module may be used as the receiver of a
    # remote type (@keyword.t expands to Keyword.t).
    test "@type with module attributes" do
      bytecode =
        test_module do
          @keyword Keyword
          @type kw :: @keyword.t
          @type kw(value) :: @keyword.t(value)
        end

      assert [type: {:kw, kw, _}, type: {:kw, kw_with_value, [{:var, _, :value}]}] =
               types(bytecode)

      assert {:remote_type, _, [{:atom, _, Keyword}, {:atom, _, :t}, []]} = kw
      assert {:remote_type, _, kw_with_value_args} = kw_with_value
      assert [{:atom, _, Keyword}, {:atom, _, :t}, [{:var, _, :value}]] = kw_with_value_args
    end

    # The error mentions the attribute value to aid debugging.
    test "invalid remote @type with module attribute that does not evaluate to a module" do
      assert_raise CompileError, ~r/\(@foo is "bar"\)/, fn ->
        test_module do
          @foo "bar"
          @type t :: @foo.t
        end
      end
    end

    # defines_type?/2 looks up name/arity pairs while the module is being
    # compiled.
    test "defines_type?" do
      test_module do
        @type my_type :: tuple
        @type my_type(a) :: [a]
        assert Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 0})
        assert Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 1})
        refute Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 2})
      end
    end

    # spec_to_callback/2 copies an existing @spec into a @callback.
    test "spec_to_callback/2" do
      bytecode =
        test_module do
          @spec foo() :: term()
          def foo(), do: :ok
          Kernel.Typespec.spec_to_callback(__MODULE__, {:foo, 0})
        end

      assert specs(bytecode) == callbacks(bytecode)
    end
    # Specs are stored as {{name, arity}, [fun clauses]}; each clause is a
    # :fun node with a :product of argument types plus the return type.
    test "@spec(spec)" do
      bytecode =
        test_module do
          def my_fun1(x), do: x
          def my_fun2(), do: :ok
          def my_fun3(x, y), do: {x, y}
          def my_fun4(x), do: x
          @spec my_fun1(integer) :: integer
          @spec my_fun2() :: integer
          @spec my_fun3(integer, integer) :: {integer, integer}
          @spec my_fun4(x :: integer) :: integer
        end

      assert [my_fun1, my_fun2, my_fun3, my_fun4] = specs(bytecode)

      assert {{:my_fun1, 1}, [{:type, _, :fun, args}]} = my_fun1
      assert [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}] = args

      assert {{:my_fun2, 0}, [{:type, _, :fun, args}]} = my_fun2
      assert [{:type, _, :product, []}, {:type, _, :integer, []}] = args

      assert {{:my_fun3, 2}, [{:type, _, :fun, [arg1, arg2]}]} = my_fun3
      assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = arg1
      assert {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = arg2

      # Annotated arguments (x :: integer) become :ann_type nodes.
      assert {{:my_fun4, 1}, [{:type, _, :fun, args}]} = my_fun4
      assert [x, {:type, _, :integer, []}] = args
      assert {:type, _, :product, [{:ann_type, _, [{:var, _, :x}, {:type, _, :integer, []}]}]} = x
    end

    # `tuple` is a built-in unless rebound by a `when` clause, in which case
    # it becomes a variable in a :bounded_fun.
    test "@spec(spec) with tuples and tuple vars" do
      bytecode =
        test_module do
          def my_fun1(x), do: x
          def my_fun2(x), do: x
          @spec my_fun1(tuple) :: tuple
          @spec my_fun2(tuple) :: tuple when tuple: {integer, integer}
        end

      assert [my_fun1, my_fun2] = specs(bytecode)

      assert {{:my_fun1, 1}, [{:type, _, :fun, args}]} = my_fun1
      assert [{:type, _, :product, [{:type, _, :tuple, :any}]}, {:type, _, :tuple, :any}] = args

      assert {{:my_fun2, 1}, [{:type, _, :bounded_fun, args}]} = my_fun2
      assert [type, _] = args

      assert {:type, _, :fun, [{:type, _, :product, [{:var, _, :tuple}]}, {:var, _, :tuple}]} =
               type
    end

    # `when x: type` produces an :is_subtype constraint; `when x: var`
    # produces a plain :fun with :var nodes (no constraint).
    test "@spec(spec) with guards" do
      bytecode =
        test_module do
          def my_fun1(x), do: x
          @spec my_fun1(x) :: boolean when x: integer

          def my_fun2(x), do: x
          @spec my_fun2(x) :: x when x: var

          def my_fun3(_x, y), do: y
          @spec my_fun3(x, y) :: y when y: x, x: var
        end

      assert [my_fun1, my_fun2, my_fun3] = specs(bytecode)

      assert {{:my_fun1, 1}, [{:type, _, :bounded_fun, args}]} = my_fun1
      assert [{:type, _, :fun, [product, {:type, _, :boolean, []}]}, constraints] = args
      assert {:type, _, :product, [{:var, _, :x}]} = product
      assert [{:type, _, :constraint, subtype}] = constraints
      assert [{:atom, _, :is_subtype}, [{:var, _, :x}, {:type, _, :integer, []}]] = subtype

      assert {{:my_fun2, 1}, [{:type, _, :fun, args}]} = my_fun2
      assert [{:type, _, :product, [{:var, _, :x}]}, {:var, _, :x}] = args

      # A constraint may reference another spec variable (y: x).
      assert {{:my_fun3, 2}, [{:type, _, :bounded_fun, args}]} = my_fun3
      assert [{:type, _, :fun, fun_type}, [{:type, _, :constraint, constraint_type}]] = args
      assert [{:type, _, :product, [{:var, _, :x}, {:var, _, :y}]}, {:var, _, :y}] = fun_type
      assert [{:atom, _, :is_subtype}, [{:var, _, :y}, {:var, _, :x}]] = constraint_type
    end
    # Reading @type/@opaque/@typep inside a function body returns all
    # definitions accumulated so far, most recent first, each paired with
    # {module, line} metadata.
    test "@type, @opaque, and @typep as module attributes" do
      defmodule TypeModuleAttributes do
        @type type1 :: boolean
        @opaque opaque1 :: boolean
        @typep typep1 :: boolean

        def type1, do: @type
        def opaque1, do: @opaque
        def typep1, do: @typep

        @type type2 :: atom
        @type type3 :: pid
        @opaque opaque2 :: atom
        @opaque opaque3 :: pid
        @typep typep2 :: atom

        def type2, do: @type
        def opaque2, do: @opaque
        def typep2, do: @typep

        # Avoid unused warnings
        @spec foo(typep1) :: typep2
        def foo(_x), do: :ok
      end

      assert [
               {:type, {:"::", _, [{:type1, _, _}, {:boolean, _, _}]}, {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.type1()

      assert [
               {:type, {:"::", _, [{:type3, _, _}, {:pid, _, _}]}, {TypeModuleAttributes, _}},
               {:type, {:"::", _, [{:type2, _, _}, {:atom, _, _}]}, {TypeModuleAttributes, _}},
               {:type, {:"::", _, [{:type1, _, _}, {:boolean, _, _}]}, {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.type2()

      assert [
               {:opaque, {:"::", _, [{:opaque1, _, _}, {:boolean, _, _}]},
                {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.opaque1()

      assert [
               {:opaque, {:"::", _, [{:opaque3, _, _}, {:pid, _, _}]}, {TypeModuleAttributes, _}},
               {:opaque, {:"::", _, [{:opaque2, _, _}, {:atom, _, _}]},
                {TypeModuleAttributes, _}},
               {:opaque, {:"::", _, [{:opaque1, _, _}, {:boolean, _, _}]},
                {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.opaque2()

      assert [
               {:typep, {:"::", _, [{:typep1, _, _}, {:boolean, _, _}]},
                {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.typep1()

      assert [
               {:typep, {:"::", _, [{:typep2, _, _}, {:atom, _, _}]}, {TypeModuleAttributes, _}},
               {:typep, {:"::", _, [{:typep1, _, _}, {:boolean, _, _}]},
                {TypeModuleAttributes, _}}
             ] = TypeModuleAttributes.typep2()
    after
      # Clean up the module so repeated runs do not warn about redefinition.
      :code.delete(TypeModuleAttributes)
      :code.purge(TypeModuleAttributes)
    end

    # Same accumulation behavior for @spec/@callback/@macrocallback,
    # including `when` constraints, which are kept as :when AST nodes.
    test "@spec, @callback, and @macrocallback as module attributes" do
      defmodule SpecModuleAttributes do
        @callback callback1 :: integer
        @macrocallback macrocallback1 :: integer

        @spec spec1 :: boolean
        def spec1, do: @spec

        @callback callback2 :: var when var: boolean
        @macrocallback macrocallback2 :: var when var: boolean

        @spec spec2 :: atom
        def spec2, do: @spec

        @spec spec3 :: pid
        def spec3, do: :ok

        def spec4, do: @spec

        def callback, do: @callback
        def macrocallback, do: @macrocallback
      end

      assert [
               {:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
             ] = SpecModuleAttributes.spec1()

      assert [
               {:spec, {:"::", _, [{:spec2, _, _}, {:atom, _, _}]}, {SpecModuleAttributes, _}},
               {:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
             ] = SpecModuleAttributes.spec2()

      assert [
               {:spec, {:"::", _, [{:spec3, _, _}, {:pid, _, _}]}, {SpecModuleAttributes, _}},
               {:spec, {:"::", _, [{:spec2, _, _}, {:atom, _, _}]}, {SpecModuleAttributes, _}},
               {:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
             ] = SpecModuleAttributes.spec4()

      assert [
               {:callback,
                {:when, _,
                 [{:"::", _, [{:callback2, _, _}, {:var, _, _}]}, [var: {:boolean, _, _}]]},
                {SpecModuleAttributes, _}},
               {:callback, {:"::", _, [{:callback1, _, _}, {:integer, _, _}]},
                {SpecModuleAttributes, _}}
             ] = SpecModuleAttributes.callback()

      assert [
               {:macrocallback,
                {:when, _,
                 [{:"::", _, [{:macrocallback2, _, _}, {:var, _, _}]}, [var: {:boolean, _, _}]]},
                {SpecModuleAttributes, _}},
               {:macrocallback, {:"::", _, [{:macrocallback1, _, _}, {:integer, _, _}]},
                {SpecModuleAttributes, _}}
             ] = SpecModuleAttributes.macrocallback()
    after
      :code.delete(SpecModuleAttributes)
      :code.purge(SpecModuleAttributes)
    end
    # callbacks/1 groups results by name/arity; multiple @callback clauses
    # for the same name/arity are stored as multiple fun clauses.
    test "@callback(callback)" do
      bytecode =
        test_module do
          @callback my_fun(integer) :: integer
          @callback my_fun(list) :: list
          @callback my_fun() :: integer
          @callback my_fun(integer, integer) :: {integer, integer}
        end

      assert [my_fun_0, my_fun_1, my_fun_2] = callbacks(bytecode)

      assert {{:my_fun, 0}, [{:type, _, :fun, args}]} = my_fun_0
      assert [{:type, _, :product, []}, {:type, _, :integer, []}] = args

      assert {{:my_fun, 1}, [clause1, clause2]} = my_fun_1
      assert {:type, _, :fun, args1} = clause1
      assert [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}] = args1
      assert {:type, _, :fun, args2} = clause2
      assert [{:type, _, :product, [{:type, _, :list, []}]}, {:type, _, :list, []}] = args2

      assert {{:my_fun, 2}, [{:type, _, :fun, [args_type, return_type]}]} = my_fun_2

      assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} =
               args_type

      assert {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]} =
               return_type
    end

    # Specs and callbacks are stored in separate bytecode chunks and do not
    # interfere with one another.
    test "@spec + @callback" do
      bytecode =
        test_module do
          def my_fun(x), do: x
          @spec my_fun(integer) :: integer
          @spec my_fun(charlist) :: charlist
          @callback cb(integer) :: integer
        end

      assert [{{:cb, 1}, [{:type, _, :fun, args}]}] = callbacks(bytecode)
      assert [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}] = args

      assert [{{:my_fun, 1}, [integer_clause, charlist_clause]}] = specs(bytecode)

      assert {:type, _, :fun, [{:type, _, :product, [arg]}, return]} = integer_clause
      assert {:type, _, :integer, []} = arg
      assert {:type, _, :integer, []} = return

      assert {:type, _, :fun, [{:type, _, :product, [arg]}, return]} = charlist_clause
      assert {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]} = arg
      assert {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]} = return
    end

    # A parenthesized fun type as a spec argument is parsed as a single
    # argument, not as a block.
    test "block handling" do
      bytecode =
        test_module do
          @spec foo((() -> [integer])) :: integer
          def foo(_), do: 1
        end

      assert [{{:foo, 1}, [{:type, _, :fun, [args, return]}]}] = specs(bytecode)
      assert {:type, _, :product, [{:type, _, :fun, fun_args}]} = args
      assert [{:type, _, :product, []}, {:type, _, :list, [{:type, _, :integer, []}]}] = fun_args
      assert {:type, _, :integer, []} = return
    end
end
describe "Code.Typespec" do
    # Round-trip check: quoted @type definitions are compiled, the stored
    # abstract format is converted back with Code.Typespec.type_to_quoted/1,
    # and the printed form must match the original. The list is sorted so it
    # lines up with types/1 output (sorted by type name).
    test "type_to_quoted" do
      quoted =
        Enum.sort([
          quote(do: @type(with_ann() :: t :: atom())),
          quote(do: @type(a_tuple() :: tuple())),
          quote(do: @type(empty_tuple() :: {})),
          quote(do: @type(one_tuple() :: {:foo})),
          quote(do: @type(two_tuple() :: {:foo, :bar})),
          quote(do: @type(imm_type_1() :: 1)),
          quote(do: @type(imm_type_2() :: :foo)),
          quote(do: @type(simple_type() :: integer())),
          quote(do: @type(param_type(p) :: [p])),
          quote(do: @type(union_type() :: integer() | binary() | boolean())),
          quote(do: @type(binary_type1() :: <<_::_*8>>)),
          quote(do: @type(binary_type2() :: <<_::3>>)),
          quote(do: @type(binary_type3() :: <<_::3, _::_*8>>)),
          quote(do: @type(tuple_type() :: {integer()})),
          quote(
            do: @type(ftype() :: (() -> any()) | (() -> integer()) | (integer() -> integer()))
          ),
          quote(do: @type(cl() :: charlist())),
          quote(do: @type(st() :: struct())),
          quote(do: @type(ab() :: as_boolean(term()))),
          quote(do: @type(kw() :: keyword())),
          quote(do: @type(kwt() :: keyword(term()))),
          quote(do: @type(vaf() :: (... -> any()))),
          quote(do: @type(rng() :: 1..10)),
          quote(do: @type(opts() :: [first: integer(), step: integer(), last: integer()])),
          quote(do: @type(ops() :: {+1, -1})),
          quote(do: @type(a_map() :: map())),
          quote(do: @type(empty_map() :: %{})),
          quote(do: @type(my_map() :: %{hello: :world})),
          quote(do: @type(my_req_map() :: %{required(0) => :foo})),
          quote(do: @type(my_opt_map() :: %{optional(0) => :foo})),
          quote(do: @type(my_struct() :: %TypespecTest{hello: :world})),
          quote(do: @type(list1() :: list())),
          quote(do: @type(list2() :: [0])),
          quote(do: @type(list3() :: [...])),
          quote(do: @type(list4() :: [0, ...])),
          quote(do: @type(nil_list() :: []))
        ])

      bytecode =
        test_module do
          Module.eval_quoted(__MODULE__, quoted)
        end

      types = types(bytecode)

      # Compare via Macro.to_string/1 so metadata differences are ignored.
      Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} ->
        ast = Code.Typespec.type_to_quoted(type)
        assert Macro.to_string(quote(do: @type(unquote(ast)))) == Macro.to_string(definition)
      end)
    end

    # :paren_type wrappers (emitted by some Erlang tooling) are unwrapped.
    test "type_to_quoted for paren_type" do
      type = {:my_type, {:paren_type, 0, [{:type, 0, :integer, []}]}, []}

      assert Code.Typespec.type_to_quoted(type) ==
               {:"::", [], [{:my_type, [], []}, {:integer, [line: 0], []}]}
    end
    # Round-trip check for specs: compile quoted @spec definitions, convert
    # each stored clause back with Code.Typespec.spec_to_quoted/2, and
    # compare the printed forms (both sides sorted for stable pairing).
    test "spec_to_quoted" do
      quoted =
        Enum.sort([
          quote(do: @spec(foo() :: integer())),
          quote(do: @spec(foo(atom()) :: integer() | [{}])),
          quote(do: @spec(foo(arg) :: integer() when [arg: integer()])),
          quote(do: @spec(foo(arg) :: arg when [arg: var])),
          quote(do: @spec(foo(arg :: atom()) :: atom()))
        ])

      bytecode =
        test_module do
          def foo(), do: 1
          def foo(arg), do: arg
          Module.eval_quoted(__MODULE__, quote(do: (unquote_splicing(quoted))))
        end

      # Specs are grouped per name/arity; flatten back to one @spec per clause.
      specs =
        Enum.flat_map(specs(bytecode), fn {{_, _}, specs} ->
          Enum.map(specs, fn spec ->
            quote(do: @spec(unquote(Code.Typespec.spec_to_quoted(:foo, spec))))
          end)
        end)

      specs_with_quoted = specs |> Enum.sort() |> Enum.zip(quoted)

      Enum.each(specs_with_quoted, fn {spec, definition} ->
        assert Macro.to_string(spec) == Macro.to_string(definition)
      end)
    end
    # Type parameters must be variables; the error lists every offender.
    test "non-variables are given as arguments" do
      msg = ~r/The type one_bad_variable\/1 has an invalid argument\(s\): String.t\(\)/

      assert_raise CompileError, msg, fn ->
        test_module do
          @type one_bad_variable(String.t()) :: String.t()
        end
      end

      msg = ~r/The type two_bad_variables\/2 has an invalid argument\(s\): :ok, Enum.t\(\)/

      assert_raise CompileError, msg, fn ->
        test_module do
          @type two_bad_variables(:ok, Enum.t()) :: {:ok, []}
        end
      end

      # Valid and invalid parameters may be mixed; only the bad one is shown.
      msg = ~r/The type one_bad_one_good\/2 has an invalid argument\(s\): \"\"/

      assert_raise CompileError, msg, fn ->
        test_module do
          @type one_bad_one_good(input1, "") :: {:ok, input1}
        end
      end
    end

    # fetch_types/1 and fetch_specs/1 return :error for unknown modules.
    test "retrieval invalid data" do
      assert Code.Typespec.fetch_types(Unknown) == :error
      assert Code.Typespec.fetch_specs(Unknown) == :error
    end
# This is a test that implements all types specified in lib/elixir/pages/typespecs.md
test "documented types and their AST" do
  # Struct used by the literal struct type examples below.
  defmodule SomeStruct do
    defstruct [:key]
  end

  # One quoted @type definition per documented type. Sorted so the list
  # lines up pairwise with the (also sorted) types read back from bytecode.
  quoted =
    Enum.sort([
      ## Basic types
      quote(do: @type(basic_any() :: any())),
      quote(do: @type(basic_none() :: none())),
      quote(do: @type(basic_atom() :: atom())),
      quote(do: @type(basic_map() :: map())),
      quote(do: @type(basic_pid() :: pid())),
      quote(do: @type(basic_port() :: port())),
      quote(do: @type(basic_reference() :: reference())),
      quote(do: @type(basic_struct() :: struct())),
      quote(do: @type(basic_tuple() :: tuple())),
      # Numbers
      quote(do: @type(basic_float() :: float())),
      quote(do: @type(basic_integer() :: integer())),
      quote(do: @type(basic_neg_integer() :: neg_integer())),
      quote(do: @type(basic_non_neg_integer() :: non_neg_integer())),
      quote(do: @type(basic_pos_integer() :: pos_integer())),
      # Lists
      quote(do: @type(basic_list_type() :: list(integer()))),
      quote(do: @type(basic_nonempty_list_type() :: nonempty_list(integer()))),
      quote do
        @type basic_maybe_improper_list_type() :: maybe_improper_list(integer(), atom())
      end,
      quote do
        @type basic_nonempty_improper_list_type() :: nonempty_improper_list(integer(), atom())
      end,
      quote do
        @type basic_nonempty_maybe_improper_list_type() ::
                nonempty_maybe_improper_list(integer(), atom())
      end,
      ## Literals
      quote(do: @type(literal_atom() :: :atom)),
      quote(do: @type(literal_integer() :: 1)),
      quote(do: @type(literal_integers() :: 1..10)),
      quote(do: @type(literal_empty_bitstring() :: <<>>)),
      quote(do: @type(literal_size_0() :: <<_::0>>)),
      quote(do: @type(literal_unit_1() :: <<_::_*1>>)),
      quote(do: @type(literal_size_1_unit_8() :: <<_::100, _::_*256>>)),
      quote(do: @type(literal_function_arity_any() :: (... -> integer()))),
      quote(do: @type(literal_function_arity_0() :: (() -> integer()))),
      quote(do: @type(literal_function_arity_2() :: (integer(), atom() -> integer()))),
      quote(do: @type(literal_list_type() :: [integer()])),
      quote(do: @type(literal_empty_list() :: [])),
      quote(do: @type(literal_list_nonempty() :: [...])),
      quote(do: @type(literal_nonempty_list_type() :: [atom(), ...])),
      quote(do: @type(literal_keyword_list_fixed_key() :: [key: integer()])),
      quote(do: @type(literal_keyword_list_fixed_key2() :: [{:key, integer()}])),
      quote(do: @type(literal_keyword_list_type_key() :: [{binary(), integer()}])),
      quote(do: @type(literal_empty_map() :: %{})),
      quote(do: @type(literal_map_with_key() :: %{key: integer()})),
      quote(
        do: @type(literal_map_with_required_key() :: %{required(bitstring()) => integer()})
      ),
      quote(
        do: @type(literal_map_with_optional_key() :: %{optional(bitstring()) => integer()})
      ),
      quote(do: @type(literal_struct_all_fields_any_type() :: %SomeStruct{})),
      quote(do: @type(literal_struct_all_fields_key_type() :: %SomeStruct{key: integer()})),
      quote(do: @type(literal_empty_tuple() :: {})),
      quote(do: @type(literal_2_element_tuple() :: {1, 2})),
      ## Built-in types
      quote(do: @type(built_in_term() :: term())),
      quote(do: @type(built_in_arity() :: arity())),
      quote(do: @type(built_in_as_boolean() :: as_boolean(:t))),
      quote(do: @type(built_in_binary() :: binary())),
      quote(do: @type(built_in_bitstring() :: bitstring())),
      quote(do: @type(built_in_boolean() :: boolean())),
      quote(do: @type(built_in_byte() :: byte())),
      quote(do: @type(built_in_char() :: char())),
      quote(do: @type(built_in_charlist() :: charlist())),
      quote(do: @type(built_in_nonempty_charlist() :: nonempty_charlist())),
      quote(do: @type(built_in_fun() :: fun())),
      quote(do: @type(built_in_function() :: function())),
      quote(do: @type(built_in_identifier() :: identifier())),
      quote(do: @type(built_in_iodata() :: iodata())),
      quote(do: @type(built_in_iolist() :: iolist())),
      quote(do: @type(built_in_keyword() :: keyword())),
      quote(do: @type(built_in_keyword_value_type() :: keyword(:t))),
      quote(do: @type(built_in_list() :: list())),
      quote(do: @type(built_in_nonempty_list() :: nonempty_list())),
      quote(do: @type(built_in_maybe_improper_list() :: maybe_improper_list())),
      quote(
        do: @type(built_in_nonempty_maybe_improper_list() :: nonempty_maybe_improper_list())
      ),
      quote(do: @type(built_in_mfa() :: mfa())),
      quote(do: @type(built_in_module() :: module())),
      quote(do: @type(built_in_no_return() :: no_return())),
      quote(do: @type(built_in_node() :: node())),
      quote(do: @type(built_in_number() :: number())),
      quote(do: @type(built_in_struct() :: struct())),
      quote(do: @type(built_in_timeout() :: timeout())),
      ## Remote types
      quote(do: @type(remote_enum_t0() :: Enum.t())),
      quote(do: @type(remote_keyword_t1() :: Keyword.t(integer())))
    ])

  # Compile a module containing all definitions above and keep its bytecode.
  bytecode =
    test_module do
      Module.eval_quoted(__MODULE__, quoted)
    end

  types = types(bytecode)

  # Round-trip each compiled type back to quoted form and compare its
  # rendered string with the original definition.
  Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} ->
    ast = Code.Typespec.type_to_quoted(type)
    ast_string = Macro.to_string(quote(do: @type(unquote(ast))))

    case type do
      # These cases do not translate directly to their own string version.
      {:basic_list_type, _, _} ->
        assert ast_string == "@type(basic_list_type() :: [integer()])"

      {:basic_nonempty_list_type, _, _} ->
        assert ast_string == "@type(basic_nonempty_list_type() :: [integer(), ...])"

      {:literal_empty_bitstring, _, _} ->
        assert ast_string == "@type(literal_empty_bitstring() :: <<_::0>>)"

      {:literal_keyword_list_fixed_key, _, _} ->
        assert ast_string == "@type(literal_keyword_list_fixed_key() :: [{:key, integer()}])"

      {:literal_keyword_list_fixed_key2, _, _} ->
        assert ast_string == "@type(literal_keyword_list_fixed_key2() :: [{:key, integer()}])"

      {:literal_struct_all_fields_any_type, _, _} ->
        assert ast_string ==
                 "@type(literal_struct_all_fields_any_type() :: %TypespecTest.SomeStruct{key: term()})"

      {:literal_struct_all_fields_key_type, _, _} ->
        assert ast_string ==
                 "@type(literal_struct_all_fields_key_type() :: %TypespecTest.SomeStruct{key: integer()})"

      {:built_in_fun, _, _} ->
        assert ast_string == "@type(built_in_fun() :: (... -> any()))"

      {:built_in_nonempty_list, _, _} ->
        assert ast_string == "@type(built_in_nonempty_list() :: [...])"

      _ ->
        assert ast_string == Macro.to_string(definition)
    end
  end)
end
end
describe "behaviour_info" do
  # Behaviour exercising plain callbacks, callbacks with annotated/guarded
  # arguments, macro callbacks, and optional callbacks declared across two
  # separate @optional_callbacks attributes.
  defmodule SampleCallbacks do
    @callback first(integer) :: integer
    @callback foo(atom(), binary) :: binary
    @callback bar(External.hello(), my_var :: binary) :: binary
    @callback guarded(my_var) :: my_var when my_var: binary
    @callback orr(atom | integer) :: atom
    @callback literal(123, {atom}, :foo, [integer], true) :: atom
    @macrocallback last(integer) :: Macro.t()
    @macrocallback last() :: atom
    # @optional_callbacks accumulates across multiple declarations.
    @optional_callbacks bar: 2, last: 0
    @optional_callbacks first: 1
  end

  test "defines callbacks" do
    # Macro callbacks are stored under a "MACRO-" prefixed name with the
    # arity bumped by one (last/0 -> "MACRO-last"/1, last/1 -> "MACRO-last"/2).
    expected_callbacks = [
      "MACRO-last": 1,
      "MACRO-last": 2,
      bar: 2,
      first: 1,
      foo: 2,
      guarded: 1,
      literal: 5,
      orr: 1
    ]

    assert Enum.sort(SampleCallbacks.behaviour_info(:callbacks)) == expected_callbacks
  end

  test "defines optional callbacks" do
    # The optional macro callback is reported under its "MACRO-" name too.
    assert Enum.sort(SampleCallbacks.behaviour_info(:optional_callbacks)) ==
             ["MACRO-last": 1, bar: 2, first: 1]
  end
end
end
| 33.688719 | 110 | 0.534758 |
1c94ca8a1a512638b641ff7e4ffa8e5773e74cb9 | 2,026 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_contexts_1.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_contexts_1.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_contexts_1.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.CreativeContexts1 do
  @moduledoc """
  A creative-level targeting context for the Ad Exchange Buyer API.

  ## Attributes

  - auctionType (List[String]): Only set when contextType=AUCTION_TYPE. Represents the auction types this restriction applies to. Defaults to: `null`.
  - contextType (String): The type of context (e.g., location, platform, auction type, SSL-ness). Defaults to: `null`.
  - geoCriteriaId (List[Integer]): Only set when contextType=LOCATION. Represents the geo criterias this restriction applies to. Impressions are considered to match a context if either the user location or publisher location matches a given geoCriteriaId. Defaults to: `null`.
  - platform (List[String]): Only set when contextType=PLATFORM. Represents the platforms this restriction applies to. Defaults to: `null`.
  """

  # Field names mirror the API's camelCase JSON keys; all default to nil.
  defstruct [:auctionType, :contextType, :geoCriteriaId, :platform]
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeContexts1 do
  # Plain passthrough: this model has no nested models requiring decoding.
  def decode(value, _options), do: value
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeContexts1 do
  # Delegate serialization to the API client's shared serializer helper.
  def encode(value, options),
    do: GoogleApi.AdExchangeBuyer.V14.Deserializer.serialize_non_nil(value, options)
end
| 38.961538 | 281 | 0.758144 |
1c94d1601703330b88a62b22bd9a80307d41b539 | 6,613 | ex | Elixir | lib/transmission.ex | SparkPost/elixir-sparkpost | cf0b1b5300235b8b1c06b37b8db5e6b30f1f88f5 | [
"Apache-2.0"
] | 47 | 2016-01-30T07:24:10.000Z | 2021-05-29T13:47:49.000Z | lib/transmission.ex | SparkPost/elixir-sparkpost | cf0b1b5300235b8b1c06b37b8db5e6b30f1f88f5 | [
"Apache-2.0"
] | 48 | 2016-01-21T22:11:14.000Z | 2020-10-12T06:13:50.000Z | lib/transmission.ex | SparkPost/elixir-sparkpost | cf0b1b5300235b8b1c06b37b8db5e6b30f1f88f5 | [
"Apache-2.0"
] | 12 | 2016-02-27T23:04:13.000Z | 2018-05-03T15:34:04.000Z | defmodule SparkPost.Transmission do
@moduledoc """
The SparkPost Transmission API endpoint for sending email. Use `SparkPost.Transmission.send/1` to
send messages, `SparkPost.Transmission.list/1` to list previous sends and `SparkPost.Transmission.get/1` to
retrieve details on a given transmission.
Check out the documentation for each function
or use the [SparkPost API reference](https://www.sparkPost.com/api#/reference/transmissions) for details.
## Request Fields
Used in calls to `SparkPost.Transmission.send/1`.
- campaign_id
- return_path
- metadata
- substitution_data
- recipients
- content
Returned by `SparkPost.Transmission.list/1`.
- id
- campaign_id
- description
- content
Returned by `SparkPost.Transmission.get/1`.
- id
- description
- state
- campaign_id
- content
- return_path
- rcpt_list_chunk_size
- rcpt_list_total_chunks
- num_rcpts
- num_generated
- num_failed_gen
- generation_start_time
- generation_end_time
- substitution_data
- metadata
- options
"""
defstruct options: %SparkPost.Transmission.Options{},
campaign_id: nil,
return_path: nil,
metadata: nil,
substitution_data: nil,
recipients: :required,
content: :required,
id: nil, # System generated fields from this point on
description: nil,
state: nil,
rcpt_list_chunk_size: nil,
rcp_list_total_chunks: nil,
num_rcpts: nil,
num_generated: nil,
num_failed_gen: nil,
generation_start_time: nil,
generation_end_time: nil
alias SparkPost.{Transmission, Recipient, Endpoint, Content}
@doc """
Create a new transmission and send some email.
## Parameters
- %SparkPost.Transmission{} consisting of:
- recipients: ["email@address", %SparkPost.Recipient{}, ...] or %SparkPost.Recipient.ListRef{}
- content: %SparkPost.Content.Inline{}, %SparkPost.Content.Raw{} or %SparkPost.Content.TemplateRef{}
- options: %SparkPost.Transmission.Options{}
- campaign_id: campaign identifier (string)
- return_path: envelope FROM address, available in Enterprise only (email address string)
- metadata: transmission-level metadata k/v pairs (keyword)
- substitution_data: transmission-level substitution_data k/v pairs (keyword)
## Examples
Send a message to a single recipient with inline text and HTML content:
alias SparkPost.{Content, Transmission}
Transmission.send(%Transmission{
recipients: ["to@you.com"],
content: %Content.Inline{
from: "from@me.com",
subject: subject,
text: text,
html: html
}
})
#=> %Transmission.Response{id: "102258889940193104",
total_accepted_recipients: 1, total_rejected_recipients: 0}
Send a message to 2 recipients using a stored message template:
alias SparkPost.Content, Transmission}
Transmission.send(
%Transmission{
recipients: ["to@you.com", "to@youtoo.com"],
content: %Content.TemplateRef{ template_id: "test-template-1" }
}
)
#=> %Transmission.Response{id: "102258889940193105",
total_accepted_recipients: 2, total_rejected_recipients: 0}
Send a message with an attachment:
alias SparkPost.{Content, Transmission}
Transmission.send(
%Transmission{
recipients: ["to@you.com"],
content: %Content.Inline{
subject: "Now with attachments!",
text: "There is an attachment with this message",
attachments: [
Content.to_attachment("cat.jpg", "image/jpeg", File.read!("cat.jpg"))
]
}
}
)
#=> %Transmission.Response{id: "102258889940193106",
total_accepted_recipients: 1, total_rejected_recipients: 0}
"""
def send(%__MODULE__{} = body) do
body = %{body |
recipients: Recipient.to_recipient_list(body.recipients),
content: Content.to_content(body.content)
}
response = Endpoint.request(:post, "transmissions", body)
Endpoint.marshal_response(response, Transmission.Response)
end
@doc """
Retrieve the details of an existing transmission.
## Parameters
- transmission ID: identifier of the transmission to retrieve
## Example
Transmission.get("102258889940193105")
#=> %Transmission{campaign_id: "",
content: %{template_id: "inline", template_version: 0,
use_draft_template: false}, description: "",
generation_end_time: "2016-01-14T12:52:05+00:00",
generation_start_time: "2016-01-14T12:52:05+00:00", id: "48215348926834924",
metadata: "", num_failed_gen: 0, num_generated: 2, num_rcpts: 2,
options: %{click_tracking: true, conversion_tracking: "", open_tracking: true},
rcp_list_total_chunks: nil, rcpt_list_chunk_size: 100, recipients: :required,
return_path: nil, state: "Success",
substitution_data: ""}
"""
def get(transid) do
response = Endpoint.request(:get, "transmissions/" <> transid)
Endpoint.marshal_response(response, __MODULE__, :transmission)
end
@doc """
List all multi-recipient transmissions, possibly filtered by campaign_id and/or content.
## Parameters
- query filters to narrow the list (keyword)
- campaign_id
- template_id
## Example
List all multi-recipient transmissions:
Transmission.list()
#=> [%Transmission{campaign_id: "", content: %{template_id: "inline"},
description: "", generation_end_time: nil, generation_start_time: nil,
id: "102258558346809186", metadata: nil, num_failed_gen: nil,
num_generated: nil, num_rcpts: nil, options: :required,
rcp_list_total_chunks: nil, rcpt_list_chunk_size: nil, recipients: :required,
return_path: :nil, state: "Success", substitution_data: nil},
%Transmission{campaign_id: "", content: %{template_id: "inline"},
description: "", generation_end_time: nil, generation_start_time: nil,
id: "48215348926834924", metadata: nil, num_failed_gen: nil,
num_generated: nil, num_rcpts: nil, options: :required,
rcp_list_total_chunks: nil, rcpt_list_chunk_size: nil, recipients: :required,
return_path: :nil, state: "Success", substitution_data: nil}]
"""
def list(filters\\[]) do
response = Endpoint.request(:get, "transmissions", %{}, %{}, [params: filters])
case response do
%Endpoint.Response{} ->
Enum.map(response.results, fn (trans) -> struct(__MODULE__, trans) end)
_ -> response
end
end
end
| 35.363636 | 109 | 0.672615 |
1c94fe52985f262f4c6d67e38267601173eafd08 | 1,861 | ex | Elixir | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/uri_targeting.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/uri_targeting.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/uri_targeting.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AuthorizedBuyersMarketplace.V1.Model.UriTargeting do
@moduledoc """
Represents a list of targeted and excluded URLs (for example, google.com). For Private Auction Deals, URLs are either included or excluded. For Programmatic Guaranteed and Preferred Deals, this doesn't apply.
## Attributes
* `excludedUris` (*type:* `list(String.t)`, *default:* `nil`) - A list of URLs to be excluded.
* `targetedUris` (*type:* `list(String.t)`, *default:* `nil`) - A list of URLs to be included.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:excludedUris => list(String.t()) | nil,
:targetedUris => list(String.t()) | nil
}
field(:excludedUris, type: :list)
field(:targetedUris, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AuthorizedBuyersMarketplace.V1.Model.UriTargeting do
def decode(value, options) do
GoogleApi.AuthorizedBuyersMarketplace.V1.Model.UriTargeting.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AuthorizedBuyersMarketplace.V1.Model.UriTargeting do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.22 | 210 | 0.735626 |
1c95040e70afe9e9279a423ac7f7266a273fc109 | 721 | ex | Elixir | lib/networking/mssp.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/networking/mssp.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/networking/mssp.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Networking.MSSP do
@moduledoc """
Helpers for dealing with the MSSP protocol
http://tintin.sourceforge.net/mssp/
"""
@mssp_var 1
@mssp_val 2
alias Game.Config
alias Game.Server
alias Game.Session
def name() do
name = Config.game_name()
<<@mssp_var>> <> "NAME" <> <<@mssp_val>> <> name
end
def players() do
player_count =
Session.Registry.connected_players()
|> length()
|> Integer.to_string()
<<@mssp_var>> <> "PLAYERS" <> <<@mssp_val>> <> player_count
end
def uptime() do
started_at =
Server.started_at()
|> Timex.to_unix()
|> Integer.to_string()
<<@mssp_var>> <> "UPTIME" <> <<@mssp_val>> <> started_at
end
end
| 18.973684 | 63 | 0.604716 |
1c9520bd1c39fa85781872e7d35c84066bf5b72f | 2,989 | exs | Elixir | config/target.exs | mtrudel/upstairsbox | 010da404a3b267188b35e72a3043681821274e4c | [
"MIT"
] | null | null | null | config/target.exs | mtrudel/upstairsbox | 010da404a3b267188b35e72a3043681821274e4c | [
"MIT"
] | null | null | null | config/target.exs | mtrudel/upstairsbox | 010da404a3b267188b35e72a3043681821274e4c | [
"MIT"
] | null | null | null | import Config
# Use shoehorn to start the main application. See the shoehorn
# docs for separating out critical OTP applications such as those
# involved with firmware updates.
config :shoehorn,
init: [:nerves_runtime, :nerves_pack],
app: Mix.Project.config()[:app]
# Nerves Runtime can enumerate hardware devices and send notifications via
# SystemRegistry. This slows down startup and not many programs make use of
# this feature.
config :nerves_runtime, :kernel, use_system_registry: false
# Erlinit can be configured without a rootfs_overlay. See
# https://github.com/nerves-project/erlinit/ for more information on
# configuring erlinit.
config :nerves,
erlinit: [
hostname_pattern: "nerves-%s"
]
# Configure the device for SSH IEx prompt access and firmware updates
#
# * See https://hexdocs.pm/nerves_ssh/readme.html for general SSH configuration
# * See https://hexdocs.pm/ssh_subsystem_fwup/readme.html for firmware updates
keys =
[
Path.join([System.user_home!(), ".ssh", "id_rsa.pub"]),
Path.join([System.user_home!(), ".ssh", "id_ecdsa.pub"]),
Path.join([System.user_home!(), ".ssh", "id_ed25519.pub"])
]
|> Enum.filter(&File.exists?/1)
if keys == [],
do:
Mix.raise("""
No SSH public keys found in ~/.ssh. An ssh authorized key is needed to
log into the Nerves device and update firmware on it using ssh.
See your project's config.exs for this error message.
""")
config :nerves_ssh,
authorized_keys: Enum.map(keys, &File.read!/1)
# Configure the network using vintage_net
# See https://github.com/nerves-networking/vintage_net for more information
config :vintage_net,
regulatory_domain: "US",
config: [
{"usb0", %{type: VintageNetDirect}},
{"eth0",
%{
type: VintageNetEthernet,
ipv4: %{method: :dhcp}
}},
{"wlan0", %{type: VintageNetWiFi}}
]
config :mdns_lite,
# The `host` key specifies what hostnames mdns_lite advertises. `:hostname`
# advertises the device's hostname.local. For the official Nerves systems, this
# is "nerves-<4 digit serial#>.local". mdns_lite also advertises
# "nerves.local" for convenience. If more than one Nerves device is on the
# network, delete "nerves" from the list.
host: [:hostname, "nerves"],
ttl: 120,
# Advertise the following services over mDNS.
services: [
%{
name: "SSH Remote Login Protocol",
protocol: "ssh",
transport: "tcp",
port: 22
},
%{
name: "Secure File Transfer Protocol over SSH",
protocol: "sftp-ssh",
transport: "tcp",
port: 22
},
%{
name: "Erlang Port Mapper Daemon",
protocol: "epmd",
transport: "tcp",
port: 4369
}
]
# Import target specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
# Uncomment to use target specific configurations
# import_config "#{Mix.target()}.exs"
#
config :logger, backends: [RamoopsLogger, RingLogger]
| 29.019417 | 81 | 0.685848 |
1c9540be707509049c41a2c4fc61d59130a3c564 | 15,407 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/campaigns.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/campaigns.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/campaigns.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Api.Campaigns do
@moduledoc """
API calls for all endpoints tagged `Campaigns`.
"""
alias GoogleApi.DFAReporting.V34.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets one campaign by ID.

## Parameters

  * `connection` - `GoogleApi.DFAReporting.V34.Connection.t` to the server.
  * `profile_id` - User profile ID associated with this request.
  * `id` - Campaign ID.
  * `optional_params` - Standard query parameters (`:alt`, `:fields`, `:key`,
    `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:userIp`).
  * `opts` - Call options.

## Returns

  * `{:ok, %GoogleApi.DFAReporting.V34.Model.Campaign{}}` on success
  * `{:error, info}` on failure
"""
@spec dfareporting_campaigns_get(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.DFAReporting.V34.Model.Campaign.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, any()}
def dfareporting_campaigns_get(connection, profile_id, id, optional_params \\ [], opts \\ []) do
  # Every optional parameter for this endpoint travels in the query string.
  query_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  # URL-encode both path segments before substitution.
  path_params = %{
    "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
    "id" => URI.encode(id, &URI.char_unreserved?/1)
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/campaigns/{id}", path_params)
    |> Request.add_optional_params(query_config, optional_params)
    |> Request.library_version(@library_version)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Campaign{}])
end
@doc """
Inserts a new campaign.

## Parameters

  * `connection` - `GoogleApi.DFAReporting.V34.Connection.t` to the server.
  * `profile_id` - User profile ID associated with this request.
  * `optional_params` - Standard query parameters (`:alt`, `:fields`, `:key`,
    `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:userIp`) plus `:body`, a
    `GoogleApi.DFAReporting.V34.Model.Campaign.t` to create.
  * `opts` - Call options.

## Returns

  * `{:ok, %GoogleApi.DFAReporting.V34.Model.Campaign{}}` on success
  * `{:error, info}` on failure
"""
@spec dfareporting_campaigns_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
        {:ok, GoogleApi.DFAReporting.V34.Model.Campaign.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, any()}
def dfareporting_campaigns_insert(connection, profile_id, optional_params \\ [], opts \\ []) do
  # All optional params go in the query string except :body, the POST payload.
  param_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query,
    :body => :body
  }

  path_params = %{"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:post)
    |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/campaigns", path_params)
    |> Request.add_optional_params(param_config, optional_params)
    |> Request.library_version(@library_version)

  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Campaign{}])
end
@doc """
Retrieves a list of campaigns, possibly filtered. This method supports paging.

## Parameters

  * `connection` - `GoogleApi.DFAReporting.V34.Connection.t` to the server.
  * `profile_id` - User profile ID associated with this request.
  * `optional_params` - Standard query parameters (`:alt`, `:fields`, `:key`,
    `:oauth_token`, `:prettyPrint`, `:quotaUser`, `:userIp`) plus the endpoint
    filters: `:advertiserGroupIds`, `:advertiserIds`, `:archived`,
    `:atLeastOneOptimizationActivity`, `:excludedIds`, `:ids`, `:maxResults`,
    `:overriddenEventTagId`, `:pageToken`, `:searchString` (wildcards `*`
    allowed, implicit at both ends), `:sortField`, `:sortOrder`,
    `:subaccountId`.
  * `opts` - Call options.

## Returns

  * `{:ok, %GoogleApi.DFAReporting.V34.Model.CampaignsListResponse{}}` on success
  * `{:error, info}` on failure
"""
@spec dfareporting_campaigns_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
        {:ok, GoogleApi.DFAReporting.V34.Model.CampaignsListResponse.t()}
        | {:ok, Tesla.Env.t()}
        | {:error, any()}
def dfareporting_campaigns_list(connection, profile_id, optional_params \\ [], opts \\ []) do
  # Every optional parameter for this endpoint travels in the query string.
  param_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query,
    :advertiserGroupIds => :query,
    :advertiserIds => :query,
    :archived => :query,
    :atLeastOneOptimizationActivity => :query,
    :excludedIds => :query,
    :ids => :query,
    :maxResults => :query,
    :overriddenEventTagId => :query,
    :pageToken => :query,
    :searchString => :query,
    :sortField => :query,
    :sortOrder => :query,
    :subaccountId => :query
  }

  path_params = %{"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)}

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/campaigns", path_params)
    |> Request.add_optional_params(param_config, optional_params)
    |> Request.library_version(@library_version)

  connection
  |> Connection.execute(request)
  |> Response.decode(
    opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.CampaignsListResponse{}]
  )
end
@doc """
Updates an existing campaign. This method supports patch semantics.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - Campaign ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.Campaign.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.Campaign{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_campaigns_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V34.Model.Campaign.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_campaigns_patch(connection, profile_id, id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/campaigns", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Campaign{}])
end
@doc """
Updates an existing campaign.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.Campaign.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.Campaign{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_campaigns_update(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V34.Model.Campaign.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_campaigns_update(connection, profile_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/campaigns", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Campaign{}])
end
end
| 46.267267 | 486 | 0.635425 |
1c956048ff0dfce7d103572b08d32a81548c2266 | 592 | ex | Elixir | spec/messages_enqueuers/dummy_modules_for_tests/dummy_enqueuer.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | spec/messages_enqueuers/dummy_modules_for_tests/dummy_enqueuer.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | spec/messages_enqueuers/dummy_modules_for_tests/dummy_enqueuer.ex | botsunit/wok_async | 466f16057ec6af1cb12990b2edd33a34aab7f8ec | [
"BSD-3-Clause"
] | null | null | null | defmodule WokAsyncMessageHandler.MessagesEnqueuers.DummyEnqueuer do
defmodule Serializers.TestEctoSchema do
def message_versions, do: [1]
def created(struct, _version), do: %{id: struct.id}
def partition_key(struct), do: struct.id
def message_route(event), do: "bot/resource/#{event}"
end
@datastore WokAsyncMessageHandler.Spec.Repo
@producer_name "from_bot"
@realtime_topic "realtime_topic"
@messages_topic "messages_topic"
@serializers WokAsyncMessageHandler.MessagesEnqueuers.DummyEnqueuer.Serializers
use WokAsyncMessageHandler.MessagesEnqueuers.Ecto
end
| 37 | 81 | 0.793919 |
1c957be8b12991105b9a7cec58fd628dd3eac1d0 | 842 | ex | Elixir | lib/phoenix_events_live_web/schema/types.ex | D-D-ing/phoenix-events-live | b68bdfdac1afc241574a0a8ffce25cf43bb71abe | [
"MIT"
] | null | null | null | lib/phoenix_events_live_web/schema/types.ex | D-D-ing/phoenix-events-live | b68bdfdac1afc241574a0a8ffce25cf43bb71abe | [
"MIT"
] | null | null | null | lib/phoenix_events_live_web/schema/types.ex | D-D-ing/phoenix-events-live | b68bdfdac1afc241574a0a8ffce25cf43bb71abe | [
"MIT"
] | null | null | null | defmodule PhoenixEventsLiveWeb.Schema.Types do
use Absinthe.Schema.Notation
use Absinthe.Ecto, repo: PhoenixEventsLive.Repo
object :interaction do
field :id, non_null(:id)
field :name, non_null(:string)
field :type, non_null(:integer)
field :text, :string
field :value, :string
field :visible, :boolean
field :items, list_of(non_null(:string))
field :live_event_id, non_null(:id)
end
object :live_event do
field :id, non_null(:id)
field :name, non_null(:string)
field :description, :string
field :access_token, :string
field :interactions, list_of(non_null(:interaction))
end
object :session do
field :token, :string
end
object :user do
field :name, :string
field :username, non_null(:string)
field :token, :string
field :email, :string
end
end
| 23.388889 | 56 | 0.685273 |
1c9591cb492cf8ee310bd8f479456793e68cefad | 1,665 | ex | Elixir | lib/phoenix/router/route.ex | chvanikoff/phoenix | bf7011b9f87afff0e0012d5f7a3adf0463040765 | [
"MIT"
] | null | null | null | lib/phoenix/router/route.ex | chvanikoff/phoenix | bf7011b9f87afff0e0012d5f7a3adf0463040765 | [
"MIT"
] | null | null | null | lib/phoenix/router/route.ex | chvanikoff/phoenix | bf7011b9f87afff0e0012d5f7a3adf0463040765 | [
"MIT"
] | null | null | null | defmodule Phoenix.Router.Route do
# This module defines the Route struct that is used
# throughout Phoenix's router. This struct is private
# as it contains internal routing information.
@moduledoc false
alias Phoenix.Router.Route
@doc """
The `Phoenix.Router.Route` struct. It stores:
* :verb - the HTTP verb as an upcased string
* :path - the normalized path as string
* :segments - the route path as quoted segments
* :binding - the route bindings
* :controller - the controller module
* :action - the action as an atom
* :helper - the named of the helper as a string (may be nil)
* :pipe_through - the elements to pipe through
"""
defstruct [:verb, :path, :segments, :binding, :controller, :action, :helper, :pipe_through]
@type t :: %Route{}
@doc """
Receives the verb, path, controller, action and helper
and returns a `Phoenix.Router.Route` struct.
"""
@spec build(String.t, String.t, atom, atom, atom, atom) :: t
def build(verb, path, controller, action, helper, pipe_through)
when is_binary(verb) and is_binary(path) and is_atom(controller) and
is_atom(action) and (is_binary(helper) or is_nil(helper)) and
is_list(pipe_through) do
{params, segments} = Plug.Router.Utils.build_match(path)
binding = Enum.map(params, fn var ->
{Atom.to_string(var), Macro.var(var, nil)}
end)
pipes = Enum.reduce(pipe_through, quote(do: var!(conn)), &{&1, [], [&2, []]})
%Route{verb: verb, path: path, segments: segments, binding: binding,
controller: controller, action: action, helper: helper,
pipe_through: pipes}
end
end
| 35.425532 | 93 | 0.667868 |
1c95ba1c2bf983a12e4e00efb0152d26918c7054 | 4,806 | ex | Elixir | lib/mix/lib/mix/tasks/escript.install.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/escript.install.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/escript.install.ex | TurtleAI/elixir | 2fb41ebef4d06315dd6c05ee00899572b27ee50a | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Escript.Install do
use Mix.Task
@shortdoc "Installs an escript locally"
@moduledoc """
Installs an escript locally.
If no argument is supplied but there is an escript in the project's root directory
(created with `mix escript.build`), then the escript will be installed
locally. For example:
mix do escript.build, escript.install
If an argument is provided, it should be a local path or a URL to a prebuilt escript,
a git repository, a github repository, or a hex package.
mix escript.install escript
mix escript.install path/to/escript
mix escript.install https://example.com/my_escript
mix escript.install git https://path/to/git/repo
mix escript.install git https://path/to/git/repo branch git_branch
mix escript.install git https://path/to/git/repo tag git_tag
mix escript.install git https://path/to/git/repo ref git_ref
mix escript.install github user/project
mix escript.install github user/project branch git_branch
mix escript.install github user/project tag git_tag
mix escript.install github user/project ref git_ref
mix escript.install hex hex_package
mix escript.install hex hex_package 1.2.3
After installation, the escript can be invoked as
~/.mix/escripts/foo
For convenience, consider adding `~/.mix/escripts` directory to your
`PATH` environment variable. For more information, check the wikipedia
article on PATH: https://en.wikipedia.org/wiki/PATH_(variable)
## Command line options
* `--sha512` - checks the escript matches the given SHA-512 checksum. Only
applies to installations via URL or local path.
* `--force` - forces installation without a shell prompt; primarily
intended for automation in build systems like make
* `--submodules` - fetches repository submodules before building escript from
git or github
* `--app` - specifies a custom app name to be used for building the escript
from git, github, or hex
"""
@behaviour Mix.Local.Installer
@escript_file_mode 0o555 # only read and execute permissions
@switches [force: :boolean, sha512: :string, submodules: :boolean, app: :string]
@spec run(OptionParser.argv) :: boolean
def run(argv) do
Mix.Local.Installer.install({__MODULE__, :escript}, argv, @switches)
end
### Mix.Local.Installer callbacks
def check_install_spec(_, _), do: :ok
def find_previous_versions(_src, dst) do
if File.exists?(dst), do: [dst], else: []
end
def install(dst, binary, _previous) do
if escript?(binary) do
_ = File.rm(dst)
_ = File.rm(dst <> ".bat")
executable = Path.basename(dst)
previous_executable = System.find_executable(executable)
File.mkdir_p!(Path.dirname(dst))
File.write!(dst, binary)
File.chmod!(dst, @escript_file_mode)
write_bat!(dst <> ".bat", :os.type)
Mix.shell.info [:green, "* creating ", :reset, Path.relative_to_cwd(dst)]
check_discoverability(dst, executable, previous_executable)
:ok
else
Mix.raise "The given path does not point to an escript, installation aborted"
end
end
def build(_mixfile) do
Mix.Task.run("escript.build", [])
end
### Private helpers
defp write_bat!(path, {:win32, _}) do
File.write!(path, """
@echo off
@escript "%~dpn0" %*
""")
File.chmod!(path, @escript_file_mode)
end
defp write_bat!(_path, _type) do
:ok
end
defp check_discoverability(dst, executable, previous_executable) do
current_executable = System.find_executable(executable)
cond do
# If existing executable was changed,
# it was overridden
previous_executable && previous_executable != current_executable ->
Mix.shell.error "\nwarning: escript #{inspect executable} overrides executable " <>
"#{inspect previous_executable} already in your PATH\n"
# If existing executable didn't change but it is not the one we installed,
# it is a conflict
previous_executable && previous_executable != dst ->
Mix.shell.error "\nwarning: escript #{inspect executable} conflicts with executable " <>
"#{inspect previous_executable} already in your PATH\n"
# If current executable is nil or does not match the one we just installed,
# PATH is misconfigured
current_executable != dst ->
Mix.shell.error "\nwarning: you must append #{inspect Mix.Local.path_for(:escript)} " <>
"to your PATH if you want to invoke escripts by name\n"
true ->
:ok
end
end
defp escript?(binary) do
parts = String.split(binary, "\n", parts: 4)
match?(["#!" <> _, _, _, <<80, 75, 3, 4, _::binary>>], parts)
end
end
| 33.608392 | 96 | 0.678527 |
1c95d5dd7bbbaa9fbbf10c940b583812becc7a2f | 2,049 | exs | Elixir | test/guardian/plug/ensure_not_authenticated_test.exs | cgorshing/guardian | b62f961e948dc518e6f767af08c2c5a9f1667702 | [
"MIT"
] | 3,135 | 2015-12-19T05:35:41.000Z | 2022-03-29T00:27:25.000Z | test/guardian/plug/ensure_not_authenticated_test.exs | cgorshing/guardian | b62f961e948dc518e6f767af08c2c5a9f1667702 | [
"MIT"
] | 536 | 2015-12-28T04:40:31.000Z | 2022-03-30T22:45:59.000Z | test/guardian/plug/ensure_not_authenticated_test.exs | cgorshing/guardian | b62f961e948dc518e6f767af08c2c5a9f1667702 | [
"MIT"
] | 451 | 2015-12-20T23:43:49.000Z | 2022-03-24T10:36:14.000Z | defmodule Guardian.Plug.EnsureNotAuthenticatedTest do
@moduledoc false
use Plug.Test
use ExUnit.Case, async: true
alias Guardian.Plug.EnsureNotAuthenticated
@resource %{id: "bobby"}
defmodule Handler do
@moduledoc false
import Plug.Conn
@behaviour Guardian.Plug.ErrorHandler
@impl Guardian.Plug.ErrorHandler
def auth_error(conn, {type, reason}, _opts) do
body = inspect({type, reason})
conn
|> send_resp(401, body)
end
end
defmodule Impl do
@moduledoc false
use Guardian,
otp_app: :guardian,
token_module: Guardian.Support.TokenModule
def subject_for_token(%{id: id}, _claims), do: {:ok, id}
def subject_for_token(%{"id" => id}, _claims), do: {:ok, id}
def resource_from_claims(%{"sub" => id}), do: {:ok, %{id: id}}
end
setup do
handler = __MODULE__.Handler
{:ok, token, claims} = __MODULE__.Impl.encode_and_sign(@resource)
{:ok, %{claims: claims, conn: conn(:get, "/"), token: token, handler: handler}}
end
describe "with a verified token" do
setup ctx do
conn =
ctx.conn
|> Guardian.Plug.put_current_token(ctx.token, [])
|> Guardian.Plug.put_current_claims(ctx.claims, [])
{:ok, %{conn: conn}}
end
test "it returns an error", ctx do
conn = EnsureNotAuthenticated.call(ctx.conn, error_handler: ctx.handler)
assert {401, _, "{:already_authenticated, :already_authenticated}"} = sent_resp(conn)
assert conn.halted
end
test "does not halt conn when option is set to false", ctx do
conn = EnsureNotAuthenticated.call(ctx.conn, error_handler: ctx.handler, halt: false)
assert {401, _, "{:already_authenticated, :already_authenticated}"} = sent_resp(conn)
refute conn.halted
end
end
describe "with no verified token" do
test "it allows the request to continue", ctx do
conn = EnsureNotAuthenticated.call(ctx.conn, error_handler: ctx.handler)
refute conn.halted
refute conn.status == 401
end
end
end
| 26.960526 | 91 | 0.661786 |
1c95dd20c70f404fe8d458d1543dd542071a9caa | 2,292 | ex | Elixir | lib/surface/live_view.ex | jprincipe/surface | 011ff48401f6b3659f03fbd6a92e7028b0173547 | [
"MIT"
] | null | null | null | lib/surface/live_view.ex | jprincipe/surface | 011ff48401f6b3659f03fbd6a92e7028b0173547 | [
"MIT"
] | null | null | null | lib/surface/live_view.ex | jprincipe/surface | 011ff48401f6b3659f03fbd6a92e7028b0173547 | [
"MIT"
] | null | null | null | defmodule Surface.LiveView do
@moduledoc """
A wrapper component around `Phoenix.LiveView`.
Since this module is just a wrapper around `Phoenix.LiveView`, you
cannot define custom properties for it. Only `:id` and `:session`
are available. However, built-in directives like `:for` and `:if`
can be used normally.
## Example
defmodule Example do
use Surface.LiveView
def render(assigns) do
~H"\""
<Dialog title="Alert" id="dialog">
This <b>Dialog</b> is a stateful component. Cool!
</Dialog>
<Button click="show_dialog">Click to open the dialog</Button>
"\""
end
def handle_event("show_dialog", _, socket) do
Dialog.show("dialog")
{:noreply, socket}
end
end
"""
defmacro __using__(opts) do
quote do
use Surface.BaseComponent, type: unquote(__MODULE__)
use Surface.API, include: [:prop, :data]
import Phoenix.HTML
alias Surface.Constructs.{For, If}
alias Surface.Components.Context
@before_compile Surface.Renderer
@before_compile unquote(__MODULE__)
@doc "The id of the live view"
prop id, :string, required: true
@doc """
The request info necessary for the view, such as params, cookie session info, etc.
The session is signed and stored on the client, then provided back to the server
when the client connects, or reconnects to the stateful view.
"""
prop session, :map
use Phoenix.LiveView, unquote(opts)
end
end
defmacro __before_compile__(env) do
quoted_mount(env)
end
defp quoted_mount(env) do
defaults = env.module |> Surface.API.get_defaults() |> Macro.escape()
if Module.defines?(env.module, {:mount, 3}) do
quote do
defoverridable mount: 3
def mount(params, session, socket) do
socket =
socket
|> Surface.init()
|> assign(unquote(defaults))
super(params, session, socket)
end
end
else
quote do
def mount(_params, _session, socket) do
{:ok,
socket
|> Surface.init()
|> assign(unquote(defaults))}
end
end
end
end
end
| 24.913043 | 88 | 0.602094 |
1c9609c7ae4b1df74ea93292e78dd5e7bd861dfd | 2,504 | exs | Elixir | test/acceptance/food_selection_test.exs | neilfulwiler/open_pantry | 4b705f2282c7b2365a784503c9f1bdd34c741798 | [
"MIT"
] | 41 | 2017-10-04T00:33:46.000Z | 2021-04-09T01:33:34.000Z | test/acceptance/food_selection_test.exs | openpantry/open_pantry | 27d898a65dd6f44b325f48d41bc448bb486d9c6f | [
"MIT"
] | 74 | 2017-09-20T03:36:17.000Z | 2018-11-20T20:46:16.000Z | test/acceptance/food_selection_test.exs | neilfulwiler/open_pantry | 4b705f2282c7b2365a784503c9f1bdd34c741798 | [
"MIT"
] | 12 | 2017-10-04T10:02:49.000Z | 2021-12-28T22:57:20.000Z | defmodule OpenPantry.FoodSelectionTest do
use OpenPantry.Web.AcceptanceCase, async: true
import OpenPantry.CompleteFacility
import OpenPantry.Web.UserSelectionView, only: [login_token: 1]
import OpenPantry.Web.DisplayLogic, only: [dasherize: 1]
import Wallaby.Query, only: [css: 2, button: 1, link: 1]
test "selection table has tab per credit type, plus meals and cart", %{session: session} do
%{credit_types: [credit_type|_]} = two_credit_facility()
session
|> visit(food_selection_url(Endpoint, :index))
|> assert_has(css(".#{dasherize(credit_type.name)}", text: credit_type.name))
|> Wallaby.end_session
end
test "selection table shows first foods in stock on load", %{session: session} do
%{credit_types: [credit_type|_], foods: [food|_]} = two_credit_facility()
session
|> resize_window(2000, 2000)
|> visit(food_selection_url(Endpoint, :index))
|> click(link(credit_type.name))
|> take_screenshot
|> assert_has(css(".#{dasherize(credit_type.name)}-stock-description", text: food.longdesc))
|> Wallaby.end_session
end
test "selection table does not show second food in stock on load", %{session: session} do
%{credit_types: [credit_type|_], foods: [_|[food2]]} = two_credit_facility()
session
|> visit(food_selection_url(Endpoint, :index))
|> refute_has(css(".#{dasherize(credit_type.name)}-stock-description", text: food2.longdesc))
|> Wallaby.end_session
end
test "selection table allows selecting second tab", %{session: session} do
%{credit_types: [_|[credit_type2]], foods: [_|[food2]]} = two_credit_facility()
session
|> visit(food_selection_url(Endpoint, :index))
|> click(link(credit_type2.name))
|> assert_has(css(".#{dasherize(credit_type2.name)}-stock-description", text: food2.longdesc))
|> Wallaby.end_session
end
@tag :pending
test "clicking + adds to cart, decrements stock quantity", %{session: session} do
%{user: user } = one_credit_facility()
session = visit(session, "/en/food_selections?login=#{login_token(user)}")
take_screenshot session
assert has?(session, stock_available(20))
assert has?(session, stock_requested(0))
click(session, button("+"))
assert has?(session, stock_available(19))
assert has?(session, stock_requested(1))
end
def stock_available(count), do: css(".js-available-quantity", text: "#{count}")
def stock_requested(count), do: css(".js-current-quantity", text: "#{count}")
end
| 37.373134 | 98 | 0.70008 |
1c960d8419921fa65fa4a4598452ee839187584d | 96 | exs | Elixir | mix_ex/test/mix_ex_test.exs | QuirkOo/secret-elixir | ff707ecc34c84b3e1e32010d0d7f23ccacf486cc | [
"MIT"
] | 1 | 2018-05-16T16:54:31.000Z | 2018-05-16T16:54:31.000Z | mix_ex/test/mix_ex_test.exs | QuirkOo/secret-elixir | ff707ecc34c84b3e1e32010d0d7f23ccacf486cc | [
"MIT"
] | null | null | null | mix_ex/test/mix_ex_test.exs | QuirkOo/secret-elixir | ff707ecc34c84b3e1e32010d0d7f23ccacf486cc | [
"MIT"
] | null | null | null | defmodule MixExTest do
use ExUnit.Case
test "the truth" do
assert 1 + 1 == 2
end
end
| 12 | 22 | 0.645833 |
1c96185f695eddbd893c74f56d51d40e1244b141 | 452 | exs | Elixir | test/models/project_test.exs | philosodad/dashex | a3662d3b95e48fd3e7433b30c8a7505accf58e77 | [
"MIT"
] | null | null | null | test/models/project_test.exs | philosodad/dashex | a3662d3b95e48fd3e7433b30c8a7505accf58e77 | [
"MIT"
] | null | null | null | test/models/project_test.exs | philosodad/dashex | a3662d3b95e48fd3e7433b30c8a7505accf58e77 | [
"MIT"
] | null | null | null | defmodule Dashex.ProjectTest do
use Dashex.ModelCase
alias Dashex.Project
@valid_attrs %{homepage: "some content", name: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = Project.changeset(%Project{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Project.changeset(%Project{}, @invalid_attrs)
refute changeset.valid?
end
end
| 23.789474 | 64 | 0.727876 |
1c961f7746f0284afde615b51db809b38389a88f | 1,770 | exs | Elixir | test/ua_inspector/config_test.exs | elixir-twister/ua_inspector | 70642c66c9562d6879fd462e2f9993edb8ab4ce1 | [
"Apache-2.0"
] | null | null | null | test/ua_inspector/config_test.exs | elixir-twister/ua_inspector | 70642c66c9562d6879fd462e2f9993edb8ab4ce1 | [
"Apache-2.0"
] | null | null | null | test/ua_inspector/config_test.exs | elixir-twister/ua_inspector | 70642c66c9562d6879fd462e2f9993edb8ab4ce1 | [
"Apache-2.0"
] | null | null | null | defmodule UAInspector.ConfigTest do
use ExUnit.Case, async: false
alias UAInspector.Config
setup do
app_path = Application.get_env(:ua_inspector, :database_path)
on_exit fn ->
Application.put_env(:ua_inspector, :database_path, app_path)
end
end
test "application configuration" do
path = "/configuration/by/application/configuration"
url = "http://some.host/path/to/database"
Application.put_env(:ua_inspector, :database_path, path)
Application.put_env(:ua_inspector, :remote_path, [ foo: url ])
assert path == Config.database_path
assert "#{ url }/bar.yml" == Config.database_url(:foo, "bar.yml")
end
test "system environment configuration" do
path = "/configuration/by/system/environment"
var = "UA_INSPECTOR_CONFIG_TEST"
Application.put_env(:ua_inspector, :database_path, { :system, var })
System.put_env(var, path)
assert path == Config.database_path
end
test "missing configuration" do
Application.put_env(:ua_inspector, :database_path, nil)
assert nil == Config.database_path
end
test "deep key access" do
Application.put_env(:ua_inspector, :test_deep, [ deep: [ foo: :bar ]])
assert [ deep: [ foo: :bar ]] == Config.get([ :test_deep])
assert :bar == Config.get([ :test_deep, :deep, :foo ])
assert :moep == Config.get([ :unknown, :deep ], :moep)
end
test "nested system environment access" do
var = "UA_INSPECTOR_NESTED_CONFIG"
val = "very-nested"
System.put_env(var, val)
Application.put_env(:ua_inspector, :test_only, deep: { :system, var })
assert [ deep: val ] == Config.get(:test_only)
assert val == Config.get([ :test_only, :deep ])
Application.delete_env(:ua_inspector, :test_only)
end
end
| 26.029412 | 74 | 0.682486 |
1c9632dde083f5b3aa21dcdbbde94022935cc7df | 2,291 | ex | Elixir | apps/local_hex_web/lib/local_hex_web/telemetry.ex | FrancisMurillo/local_hex_mirror | 005ddaeeb1a004bc44fde92c8ede64e8a399f2c5 | [
"MIT"
] | null | null | null | apps/local_hex_web/lib/local_hex_web/telemetry.ex | FrancisMurillo/local_hex_mirror | 005ddaeeb1a004bc44fde92c8ede64e8a399f2c5 | [
"MIT"
] | null | null | null | apps/local_hex_web/lib/local_hex_web/telemetry.ex | FrancisMurillo/local_hex_mirror | 005ddaeeb1a004bc44fde92c8ede64e8a399f2c5 | [
"MIT"
] | null | null | null | defmodule LocalHexWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("local_hex.repo.query.total_time",
unit: {:native, :millisecond},
description: "The sum of the other measurements"
),
summary("local_hex.repo.query.decode_time",
unit: {:native, :millisecond},
description: "The time spent decoding the data received from the database"
),
summary("local_hex.repo.query.query_time",
unit: {:native, :millisecond},
description: "The time spent executing the query"
),
summary("local_hex.repo.query.queue_time",
unit: {:native, :millisecond},
description: "The time spent waiting for a database connection"
),
summary("local_hex.repo.query.idle_time",
unit: {:native, :millisecond},
description:
"The time the connection spent waiting before being checked out for the query"
),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {LocalHexWeb, :count_users, []}
]
end
end
| 31.819444 | 88 | 0.655172 |
1c96335cb71ca3245be3b295065243287d648642 | 11,145 | exs | Elixir | test/scenic/component/input/text_field_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | test/scenic/component/input/text_field_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | test/scenic/component/input/text_field_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 2018-09-18.
# Rewritten by Boyd Multerer on 2021-05-23
# Copyright © 2018-2021 Kry10 Limited. All rights reserved.
#
defmodule Scenic.Component.Input.TextFieldTest do
use ExUnit.Case, async: false
doctest Scenic.Component.Input.TextField
alias Scenic.Graph
alias Scenic.Scene
alias Scenic.ViewPort.Input
# import IEx
@initial_value "Initial value"
# @initial_password "*************"
# @hint "Hint String"
@press_in {:cursor_button, {:btn_left, 1, [], {14, 10}}}
@press_move {:cursor_button, {:btn_left, 1, [], {43, 10}}}
@press_out {:cursor_button, {:btn_left, 1, [], {1000, 1000}}}
@cp_k {:codepoint, {"k", []}}
@cp_l {:codepoint, {"l", []}}
@key_right {:key, {:key_right, 1, []}}
@key_left {:key, {:key_left, 1, []}}
@key_page_up {:key, {:key_pageup, 1, []}}
@key_page_down {:key, {:key_pagedown, 1, []}}
@key_home {:key, {:key_home, 1, []}}
@key_end {:key, {:key_end, 1, []}}
@key_backspace {:key, {:key_backspace, 1, []}}
@key_delete {:key, {:key_delete, 1, []}}
  defmodule TestScene do
    use Scenic.Scene
    import Scenic.Components

    # Graph with six text fields exercising the component options under
    # test: hint text, :number/:integer/string/function filters and the
    # password field type.
    def graph(text) do
      Graph.build()
      |> text_field(text, id: :text_field, hint: "Hint")
      |> text_field("", id: :number_field, filter: :number, t: {0, 40})
      |> text_field("", id: :integer_field, filter: :integer, t: {0, 80})
      |> text_field("", id: :abcdefg_field, filter: "abcdefg", t: {0, 120})
      |> text_field("", id: :fn_field, t: {0, 160}, filter: fn char -> "hjkl" =~ char end)
      |> text_field("", id: :password_field, t: {0, 200}, type: :password)
    end

    @impl Scenic.Scene
    def init(scene, {pid, text}, _opts) do
      # Remember the test process so events can be forwarded to it, then
      # render the graph and signal the test that the scene is up.
      scene =
        scene
        |> assign(pid: pid)
        |> push_graph(graph(text))

      Process.send(pid, {:up, scene}, [])
      {:ok, scene}
    end

    # Forward every component event to the test process for assertions.
    @impl Scenic.Scene
    def handle_event(event, _from, %{assigns: %{pid: pid}} = scene) do
      send(pid, {:fwd_event, event})
      {:noreply, scene}
    end
  end
setup do
out = Scenic.Test.ViewPort.start({TestScene, {self(), @initial_value}})
# wait for a signal that the scene is up before proceeding
{:ok, scene} =
receive do
{:up, scene} -> {:ok, scene}
end
# make sure the button is up
{:ok, [pid]} = Scene.child(scene, :text_field)
:_pong_ = GenServer.call(pid, :_ping_)
# needed to give time for the pid and vp to close
on_exit(fn -> Process.sleep(1) end)
out
|> Map.put(:scene, scene)
|> Map.put(:pid, pid)
end
defp force_sync(vp_pid, scene_pid) do
:_pong_ = GenServer.call(vp_pid, :_ping_)
:_pong_ = GenServer.call(scene_pid, :_ping_)
:_pong_ = GenServer.call(vp_pid, :_ping_)
end
test "press_in captures and starts editing", %{vp: vp, pid: pid} do
assert Input.fetch_captures!(vp) == {:ok, []}
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
assert Input.fetch_captures!(vp) == {:ok, [:codepoint, :cursor_button, :key]}
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "kInitial value"}}, 200)
end
test "press_out releases and ends editing", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
assert Input.fetch_captures!(vp) == {:ok, [:codepoint, :cursor_button, :key]}
Input.send(vp, @press_out)
force_sync(vp.pid, pid)
assert Input.fetch_captures!(vp) == {:ok, []}
Input.send(vp, @cp_k)
refute_receive(_, 10)
end
test "pressing in the field moves the cursor to the nearst character gap", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "kInitial value"}}, 200)
Input.send(vp, @press_move)
Input.send(vp, @cp_l)
assert_receive({:fwd_event, {:value_changed, :text_field, "kInlitial value"}}, 200)
end
test "right arrow moves the cursor to the right", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_right)
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "Iknitial value"}}, 200)
end
test "right arrow won't move past the end", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Enum.each(1..20, fn _ -> Input.send(vp, @key_right) end)
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "Initial valuek"}}, 200)
end
test "left arrow moves the cursor left", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Enum.each(1..5, fn _ -> Input.send(vp, @key_right) end)
Input.send(vp, @key_left)
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "Initkial value"}}, 200)
end
test "left arrow won't move past the start", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Enum.each(1..20, fn _ -> Input.send(vp, @key_left) end)
Input.send(vp, @cp_k)
assert_receive({:fwd_event, {:value_changed, :text_field, "kInitial value"}}, 200)
end
test "home and end move to the extends of the field", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_end)
Input.send(vp, @cp_k)
Input.send(vp, @key_home)
Input.send(vp, @cp_l)
assert_receive({:fwd_event, {:value_changed, :text_field, "lInitial valuek"}}, 200)
end
test "page_up and page_down move to the extends of the field", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_page_down)
Input.send(vp, @cp_k)
Input.send(vp, @key_page_up)
Input.send(vp, @cp_l)
assert_receive({:fwd_event, {:value_changed, :text_field, "lInitial valuek"}}, 200)
end
test "backspace removes characters backwards", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Enum.each(1..5, fn _ -> Input.send(vp, @key_right) end)
Input.send(vp, @key_backspace)
assert_receive({:fwd_event, {:value_changed, :text_field, "Inital value"}}, 200)
end
test "backspace does nothing at the start of the string", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_backspace)
refute_receive(_, 10)
end
test "delete removes characters forwards", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_delete)
assert_receive({:fwd_event, {:value_changed, :text_field, "nitial value"}}, 200)
end
test "delete does nothing at the end of the field", %{vp: vp, pid: pid} do
Input.send(vp, @press_in)
force_sync(vp.pid, pid)
Input.send(vp, @key_end)
Input.send(vp, @key_delete)
refute_receive(_, 10)
end
test "filter :number works", %{vp: vp, scene: scene} do
{:ok, [pid]} = Scene.child(scene, :number_field)
:_pong_ = GenServer.call(pid, :_ping_)
Input.send(vp, {:cursor_button, {:btn_left, 1, [], {20, 60}}})
force_sync(vp.pid, pid)
Input.send(vp, {:codepoint, {"a", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"1", []}})
assert_receive({:fwd_event, {:value_changed, :number_field, "1"}}, 200)
Input.send(vp, {:codepoint, {"v", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {".", []}})
assert_receive({:fwd_event, {:value_changed, :number_field, "1."}}, 200)
Input.send(vp, {:codepoint, {"2", []}})
assert_receive({:fwd_event, {:value_changed, :number_field, "1.2"}}, 200)
end
test "filter :integer works", %{vp: vp, scene: scene} do
{:ok, [pid]} = Scene.child(scene, :integer_field)
:_pong_ = GenServer.call(pid, :_ping_)
Input.send(vp, {:cursor_button, {:btn_left, 1, [], {14, 86}}})
force_sync(vp.pid, pid)
Input.send(vp, {:codepoint, {"a", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"1", []}})
assert_receive({:fwd_event, {:value_changed, :integer_field, "1"}}, 200)
Input.send(vp, {:codepoint, {"v", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {".", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"2", []}})
assert_receive({:fwd_event, {:value_changed, :integer_field, "12"}}, 200)
end
test "filter \"abcdefg\" works", %{vp: vp, scene: scene} do
{:ok, [pid]} = Scene.child(scene, :abcdefg_field)
:_pong_ = GenServer.call(pid, :_ping_)
Input.send(vp, {:cursor_button, {:btn_left, 1, [], {14, 121}}})
force_sync(vp.pid, pid)
Input.send(vp, {:codepoint, {"a", []}})
assert_receive({:fwd_event, {:value_changed, :abcdefg_field, "a"}}, 200)
Input.send(vp, {:codepoint, {"1", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"v", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"f", []}})
assert_receive({:fwd_event, {:value_changed, :abcdefg_field, "af"}}, 200)
end
test "filter fn works", %{vp: vp, scene: scene} do
{:ok, [pid]} = Scene.child(scene, :fn_field)
:_pong_ = GenServer.call(pid, :_ping_)
Input.send(vp, {:cursor_button, {:btn_left, 1, [], {14, 171}}})
force_sync(vp.pid, pid)
Input.send(vp, {:codepoint, {"a", []}})
refute_receive(_, 10)
Input.send(vp, {:codepoint, {"h", []}})
assert_receive({:fwd_event, {:value_changed, :fn_field, "h"}}, 200)
end
test "password field", %{vp: vp, scene: scene} do
{:ok, [pid]} = Scene.child(scene, :password_field)
:_pong_ = GenServer.call(pid, :_ping_)
Input.send(vp, {:cursor_button, {:btn_left, 1, [], {14, 214}}})
force_sync(vp.pid, pid)
Input.send(vp, {:codepoint, {"a", []}})
assert_receive({:fwd_event, {:value_changed, :password_field, "a"}}, 200)
Input.send(vp, {:codepoint, {"2", []}})
assert_receive({:fwd_event, {:value_changed, :password_field, "a2"}}, 200)
end
test "ignores non-main button clicks", %{vp: vp} do
Input.send(vp, {:cursor_button, {:btn_right, 1, [], {14, 10}}})
Input.send(vp, {:cursor_button, {:btn_middle, 1, [], {14, 10}}})
refute_receive(_, 10)
end
test "implements get/put", %{scene: scene} do
assert Scene.get_child(scene, :text_field) == ["Initial value"]
assert Scene.put_child(scene, :text_field, "updated") == :ok
assert Scene.get_child(scene, :text_field) == ["updated"]
end
test "implements fetch/update", %{scene: scene} do
assert Scene.fetch_child(scene, :text_field) == {:ok, ["Initial value"]}
%Scene{} = scene = Scene.update_child(scene, :text_field, "updated")
assert Scene.fetch_child(scene, :text_field) == {:ok, ["updated"]}
assert Scene.get_child(scene, :text_field) == ["updated"]
end
test "bounds works with defaults" do
graph =
Graph.build()
|> Scenic.Components.text_field("Test Field")
{0.0, 0.0, 288.0, 30.0} = Graph.bounds(graph)
end
test "bounds works with overrides" do
graph =
Graph.build()
|> Scenic.Components.text_field("Test Field", width: 300, height: 40)
{0.0, 0.0, 300.0, 40.0} = Graph.bounds(graph)
end
end
| 32.876106 | 99 | 0.621265 |
1c963938d27be31f8402586005a64abf2198ab41 | 16,933 | exs | Elixir | apps/mcam_server/test/mcam_server/accounts_test.exs | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | apps/mcam_server/test/mcam_server/accounts_test.exs | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | 8 | 2020-11-16T09:59:12.000Z | 2020-11-16T10:13:07.000Z | apps/mcam_server/test/mcam_server/accounts_test.exs | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | defmodule McamServer.AccountsTest do
  # NOTE(review): the text before `defmodule` on the line above is dataset-export
  # residue, not Elixir source.
  #
  # Tests for the Accounts context: email/password lookup, registration,
  # email and password updates, and the session/confirmation/reset token
  # lifecycle (largely the standard `mix phx.gen.auth` suite, plus the
  # app-specific alpha-plan subscription check under register_user/1).
  use McamServer.DataCase
  alias McamServer.Accounts
  import McamServer.AccountsFixtures
  alias McamServer.Accounts.{User, UserToken}
  alias McamServer.Subscriptions

  describe "get_user_by_email/1" do
    test "does not return the user if the email does not exist" do
      refute Accounts.get_user_by_email("unknown@example.com")
    end

    test "returns the user if the email exists" do
      %{id: id} = user = user_fixture()
      assert %User{id: ^id} = Accounts.get_user_by_email(user.email)
    end
  end

  describe "get_user_by_email_and_password/2" do
    test "does not return the user if the email does not exist" do
      refute Accounts.get_user_by_email_and_password("unknown@example.com", "hello world!")
    end

    test "does not return the user if the password is not valid" do
      user = user_fixture()
      refute Accounts.get_user_by_email_and_password(user.email, "invalid")
    end

    test "returns the user if the email and password are valid" do
      %{id: id} = user = user_fixture()

      assert %User{id: ^id} =
               Accounts.get_user_by_email_and_password(user.email, valid_user_password())
    end
  end

  describe "get_user!/1" do
    test "raises if id is invalid" do
      assert_raise Ecto.NoResultsError, fn ->
        Accounts.get_user!(-1)
      end
    end

    test "returns the user with the given id" do
      %{id: id} = user = user_fixture()
      assert %User{id: ^id} = Accounts.get_user!(user.id)
    end
  end

  describe "register_user/1" do
    test "requires email and password to be set" do
      {:error, changeset} = Accounts.register_user(%{})

      assert %{
               password: ["can't be blank"],
               email: ["can't be blank"]
             } = errors_on(changeset)
    end

    test "validates email and password when given" do
      {:error, changeset} = Accounts.register_user(%{email: "not valid", password: "not valid"})

      assert %{
               email: ["must have the @ sign and no spaces"],
               password: ["should be at least 12 character(s)"]
             } = errors_on(changeset)
    end

    test "validates maximum values for email and password for security" do
      too_long = String.duplicate("db", 100)
      {:error, changeset} = Accounts.register_user(%{email: too_long, password: too_long})
      assert "should be at most 160 character(s)" in errors_on(changeset).email
      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end

    test "validates email uniqueness" do
      %{email: email} = user_fixture()
      {:error, changeset} = Accounts.register_user(%{email: email})
      assert "has already been taken" in errors_on(changeset).email

      # Now try with the upper cased email too, to check that email case is ignored.
      {:error, changeset} = Accounts.register_user(%{email: String.upcase(email)})
      assert "has already been taken" in errors_on(changeset).email
    end

    test "registers users with a hashed password" do
      email = unique_user_email()
      {:ok, user} = Accounts.register_user(%{email: email, password: valid_user_password()})
      assert user.email == email
      assert is_binary(user.hashed_password)
      assert is_nil(user.confirmed_at)
      assert is_nil(user.password)
    end

    # App-specific behaviour: the first 15 registrations land on the :alpha
    # subscription plan; everyone after that gets :none.
    test "sets the user on the alpha plan if, and only if, there are less than 15 registered users" do
      for _ <- 1..14, do: user_fixture()

      {:ok, alpha_plan_user} =
        Accounts.register_user(%{email: unique_user_email(), password: valid_user_password()})

      {:ok, out_of_luck_user} =
        Accounts.register_user(%{email: unique_user_email(), password: valid_user_password()})

      assert {:alpha, _} = Subscriptions.camera_quota(alpha_plan_user)
      assert {:none, _} = Subscriptions.camera_quota(out_of_luck_user)
    end
  end

  describe "change_user_registration/2" do
    test "returns a changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_registration(%User{})
      assert changeset.required == [:password, :email]
    end
  end

  describe "change_user_email/2" do
    test "returns a user changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_email(%User{})
      assert changeset.required == [:email]
    end
  end

  describe "apply_user_email/3" do
    setup do
      %{user: user_fixture()}
    end

    test "requires email to change", %{user: user} do
      {:error, changeset} = Accounts.apply_user_email(user, valid_user_password(), %{})
      assert %{email: ["did not change"]} = errors_on(changeset)
    end

    test "validates email", %{user: user} do
      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: "not valid"})

      assert %{email: ["must have the @ sign and no spaces"]} = errors_on(changeset)
    end

    test "validates maximum value for email for security", %{user: user} do
      too_long = String.duplicate("db", 100)

      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: too_long})

      assert "should be at most 160 character(s)" in errors_on(changeset).email
    end

    test "validates email uniqueness", %{user: user} do
      %{email: email} = user_fixture()

      {:error, changeset} =
        Accounts.apply_user_email(user, valid_user_password(), %{email: email})

      assert "has already been taken" in errors_on(changeset).email
    end

    test "validates current password", %{user: user} do
      {:error, changeset} =
        Accounts.apply_user_email(user, "invalid", %{email: unique_user_email()})

      assert %{current_password: ["is not valid"]} = errors_on(changeset)
    end

    # apply_user_email/3 only validates — the persisted email must be unchanged.
    test "applies the email without persisting it", %{user: user} do
      email = unique_user_email()
      {:ok, user} = Accounts.apply_user_email(user, valid_user_password(), %{email: email})
      assert user.email == email
      assert Accounts.get_user!(user.id).email != email
    end
  end

  describe "deliver_update_email_instructions/3" do
    setup do
      %{user: user_fixture()}
    end

    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_update_email_instructions(user, "current@example.com", url)
        end)

      # The emailed token is url-base64; only its sha256 hash is stored.
      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "change:current@example.com"
    end
  end

  describe "update_user_email/2" do
    setup do
      user = user_fixture(confirm?: false)
      email = unique_user_email()

      token =
        extract_user_token(fn url ->
          Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
        end)

      %{user: user, token: token, email: email}
    end

    test "updates the email with a valid token", %{user: user, token: token, email: email} do
      assert Accounts.update_user_email(user, token) == :ok
      changed_user = Repo.get!(User, user.id)
      assert changed_user.email != user.email
      assert changed_user.email == email
      assert changed_user.confirmed_at
      assert changed_user.confirmed_at != user.confirmed_at
      refute Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not update email with invalid token", %{user: user} do
      assert Accounts.update_user_email(user, "oops") == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not update email if user email changed", %{user: user, token: token} do
      assert Accounts.update_user_email(%{user | email: "current@example.com"}, token) == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not update email if token expired", %{user: user, token: token} do
      # Backdate every token far past its validity window.
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      assert Accounts.update_user_email(user, token) == :error
      assert Repo.get!(User, user.id).email == user.email
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end

  describe "change_user_password/2" do
    test "returns a user changeset" do
      assert %Ecto.Changeset{} = changeset = Accounts.change_user_password(%User{})
      assert changeset.required == [:password]
    end
  end

  describe "update_user_password/3" do
    setup do
      %{user: user_fixture()}
    end

    test "validates password", %{user: user} do
      {:error, changeset} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "not valid",
          password_confirmation: "another"
        })

      assert %{
               password: ["should be at least 12 character(s)"],
               password_confirmation: ["does not match password"]
             } = errors_on(changeset)
    end

    test "validates maximum values for password for security", %{user: user} do
      too_long = String.duplicate("db", 100)

      {:error, changeset} =
        Accounts.update_user_password(user, valid_user_password(), %{password: too_long})

      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end

    test "validates current password", %{user: user} do
      {:error, changeset} =
        Accounts.update_user_password(user, "invalid", %{password: valid_user_password()})

      assert %{current_password: ["is not valid"]} = errors_on(changeset)
    end

    test "updates the password", %{user: user} do
      {:ok, user} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "new valid password"
        })

      assert is_nil(user.password)
      assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
    end

    # Changing the password must invalidate all outstanding sessions.
    test "deletes all tokens for the given user", %{user: user} do
      _ = Accounts.generate_user_session_token(user)

      {:ok, _} =
        Accounts.update_user_password(user, valid_user_password(), %{
          password: "new valid password"
        })

      refute Repo.get_by(UserToken, user_id: user.id)
    end
  end

  describe "generate_user_session_token/1" do
    setup do
      %{user: user_fixture()}
    end

    test "generates a token", %{user: user} do
      token = Accounts.generate_user_session_token(user)
      assert user_token = Repo.get_by(UserToken, token: token)
      assert user_token.context == "session"

      # Creating the same token for another user should fail
      assert_raise Ecto.ConstraintError, fn ->
        Repo.insert!(%UserToken{
          token: user_token.token,
          user_id: user_fixture().id,
          context: "session"
        })
      end
    end
  end

  describe "get_user_by_session_token/1" do
    setup do
      user = user_fixture()
      token = Accounts.generate_user_session_token(user)
      %{user: user, token: token}
    end

    test "returns user by token", %{user: user, token: token} do
      assert session_user = Accounts.get_user_by_session_token(token)
      assert session_user.id == user.id
    end

    test "does not return user for invalid token" do
      refute Accounts.get_user_by_session_token("oops")
    end

    test "does not return user for expired token", %{token: token} do
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      refute Accounts.get_user_by_session_token(token)
    end
  end

  describe "delete_session_token/1" do
    test "deletes the token" do
      user = user_fixture()
      token = Accounts.generate_user_session_token(user)
      assert Accounts.delete_session_token(token) == :ok
      refute Accounts.get_user_by_session_token(token)
    end
  end

  describe "deliver_user_confirmation_instructions/2" do
    setup do
      %{user: user_fixture(confirm?: false)}
    end

    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_confirmation_instructions(user, url)
        end)

      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "confirm"
    end
  end

  describe "confirm_user/2" do
    setup do
      user = user_fixture(confirm?: false)

      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_confirmation_instructions(user, url)
        end)

      %{user: user, token: token}
    end

    test "confirms the email with a valid token", %{user: user, token: token} do
      assert {:ok, confirmed_user} = Accounts.confirm_user(token)
      assert confirmed_user.confirmed_at
      assert confirmed_user.confirmed_at != user.confirmed_at
      assert Repo.get!(User, user.id).confirmed_at
      refute Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not confirm with invalid token", %{user: user} do
      assert Accounts.confirm_user("oops") == :error
      refute Repo.get!(User, user.id).confirmed_at
      assert Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not confirm email if token expired", %{user: user, token: token} do
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      assert Accounts.confirm_user(token) == :error
      refute Repo.get!(User, user.id).confirmed_at
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end

  describe "deliver_user_reset_password_instructions/2" do
    setup do
      %{user: user_fixture()}
    end

    test "sends token through notification", %{user: user} do
      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_reset_password_instructions(user, url)
        end)

      {:ok, token} = Base.url_decode64(token, padding: false)
      assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
      assert user_token.user_id == user.id
      assert user_token.sent_to == user.email
      assert user_token.context == "reset_password"
    end
  end

  describe "get_user_by_reset_password_token/1" do
    setup do
      user = user_fixture()

      token =
        extract_user_token(fn url ->
          Accounts.deliver_user_reset_password_instructions(user, url)
        end)

      %{user: user, token: token}
    end

    test "returns the user with valid token", %{user: %{id: id}, token: token} do
      assert %User{id: ^id} = Accounts.get_user_by_reset_password_token(token)
      assert Repo.get_by(UserToken, user_id: id)
    end

    test "does not return the user with invalid token", %{user: user} do
      refute Accounts.get_user_by_reset_password_token("oops")
      assert Repo.get_by(UserToken, user_id: user.id)
    end

    test "does not return the user if token expired", %{user: user, token: token} do
      {1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
      refute Accounts.get_user_by_reset_password_token(token)
      assert Repo.get_by(UserToken, user_id: user.id)
    end
  end

  describe "reset_user_password/2" do
    setup do
      %{user: user_fixture()}
    end

    test "validates password", %{user: user} do
      {:error, changeset} =
        Accounts.reset_user_password(user, %{
          password: "not valid",
          password_confirmation: "another"
        })

      assert %{
               password: ["should be at least 12 character(s)"],
               password_confirmation: ["does not match password"]
             } = errors_on(changeset)
    end

    test "validates maximum values for password for security", %{user: user} do
      too_long = String.duplicate("db", 100)
      {:error, changeset} = Accounts.reset_user_password(user, %{password: too_long})
      assert "should be at most 80 character(s)" in errors_on(changeset).password
    end

    test "updates the password", %{user: user} do
      {:ok, updated_user} = Accounts.reset_user_password(user, %{password: "new valid password"})
      assert is_nil(updated_user.password)
      assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
    end

    test "deletes all tokens for the given user", %{user: user} do
      _ = Accounts.generate_user_session_token(user)
      {:ok, _} = Accounts.reset_user_password(user, %{password: "new valid password"})
      refute Repo.get_by(UserToken, user_id: user.id)
    end
  end

  describe "inspect/2" do
    # The User struct's Inspect implementation must redact the password.
    test "does not include password" do
      refute inspect(%User{password: "123456"}) =~ "password: \"123456\""
    end
  end
end
| 34.208081 | 102 | 0.661962 |
1c964707982a9dd158ffe0a762ecbb5803a144d6 | 545 | exs | Elixir | config/prod.exs | billsaysthis/fd-events | 3316e25565f4c4bca12d697b3614283689ff6807 | [
"MIT"
] | null | null | null | config/prod.exs | billsaysthis/fd-events | 3316e25565f4c4bca12d697b3614283689ff6807 | [
"MIT"
] | null | null | null | config/prod.exs | billsaysthis/fd-events | 3316e25565f4c4bca12d697b3614283689ff6807 | [
"MIT"
] | null | null | null | use Mix.Config
# NOTE(review): the text before `use Mix.Config` above is dataset-export
# residue, not part of the original config file.
# ## SSL Support
#
# To get SSL working, you will need to set:
#
#     https: [port: 443,
#             keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#             certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables point to a file on
# disk for the key and cert.
# NOTE(review): System.get_env("PORT") returns a string (or nil if unset);
# confirm the endpoint accepts that, or convert with String.to_integer/1.
# NOTE(review): secret_key_base is committed in source control — confirm this
# is not a live production secret; prefer System.get_env("SECRET_KEY_BASE").
config :fd_events, FdEvents.Endpoint,
  url: [host: "example.com"],
  http: [port: System.get_env("PORT")],
  secret_key_base: "ujW4xoIpDeDlqfAa5KMv7wsNq4QBOv5WRl+6ckvtBbCSdcSW6YMSVpBpPfaYP+Ki"

# Production logging: suppress debug-level output.
config :logger,
  level: :info
| 25.952381 | 85 | 0.695413 |
1c96516d77c999ee07108eca509a23f5e3a07956 | 5,286 | ex | Elixir | lib/ex_unit/lib/ex_unit/cli_formatter.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/ex_unit/lib/ex_unit/cli_formatter.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/cli_formatter.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.CLIFormatter do
  # NOTE(review): the text before `defmodule` above is dataset-export residue.
  #
  # Console formatter for ExUnit. This is legacy (pre-1.0) Elixir: it uses
  # `defrecord` for state and the long-removed `GenEvent.Behaviour` event
  # handler API, receiving suite/case/test lifecycle events and printing
  # progress dots (or a per-test trace), failures, and a final summary.
  @moduledoc false
  use GenEvent.Behaviour
  import ExUnit.Formatter, only: [format_time: 2, format_filters: 2, format_test_failure: 5, format_test_case_failure: 4]
  # Handler state: counters, trace/color flags, the previous test's outcome
  # (used to decide spacing before a failure), and the RNG seed to report.
  defrecord Config, tests_counter: 0, invalids_counter: 0, failures_counter: 0,
                    skips_counter: 0, trace: false, color: true, previous: nil,
                    seed: nil
  ## Callbacks
  def init(opts) do
    print_filters(Keyword.take(opts, [:include, :exclude]))
    { :ok, Config.new(opts) }
  end
  # Suite done: print the summary and detach this handler.
  def handle_event({ :suite_finished, run_us, load_us }, config = Config[]) do
    print_suite(config, run_us, load_us)
    :remove_handler
  end
  # In trace mode, print the test name as it starts (no newline yet).
  def handle_event({ :test_started, ExUnit.Test[] = test }, config = Config[]) do
    if config.trace, do: IO.write("  * #{trace_test_name test}")
    { :ok, config }
  end
  def handle_event({ :test_finished, ExUnit.Test[state: :passed] = test }, config = Config[]) do
    if config.trace do
      IO.puts success(trace_test_result(test), config)
    else
      IO.write success(".", config)
    end
    { :ok, config.previous(:passed).update_tests_counter(&(&1 + 1)) }
  end
  # Invalid tests (e.g. setup_all failed) print "?" and count separately.
  def handle_event({ :test_finished, ExUnit.Test[state: { :invalid, _ }] = test }, config = Config[]) do
    if config.trace do
      IO.puts invalid(trace_test_result(test), config)
    else
      IO.write invalid("?", config)
    end
    { :ok, config.previous(:invalid).update_tests_counter(&(&1 + 1))
                 .update_invalids_counter(&(&1 + 1)) }
  end
  # Skipped tests are counted but print nothing.
  def handle_event({ :test_finished, ExUnit.Test[state: { :skip, _ }] }, config = Config[]) do
    { :ok, config.previous(:skip).update_skips_counter(&(&1 + 1)) }
  end
  # Any other finished state is a failure.
  def handle_event({ :test_finished, test }, config = Config[]) do
    if config.trace do
      IO.puts failure(trace_test_result(test), config)
    end
    config = print_test_failure(test, config)
    { :ok, config.update_tests_counter(&(&1 + 1)) }
  end
  def handle_event({ :case_started, ExUnit.TestCase[name: name] }, config = Config[]) do
    if config.trace do
      IO.puts("\n#{inspect name}")
    end
    { :ok, config }
  end
  def handle_event({ :case_finished, test_case }, config = Config[]) do
    if test_case.state != :passed do
      config = print_test_case_failure(test_case, config)
      { :ok, config }
    else
      { :ok, config }
    end
  end
  # Ignore all other events.
  def handle_event(_, config) do
    { :ok, config }
  end
  # "\r" rewinds over the name printed at :test_started so the timed
  # result overwrites it on the same line.
  defp trace_test_result(test) do
    "\r  * #{trace_test_name test} (#{trace_test_time(test)})"
  end
  # Strip the generated "test_"/"test " prefix from the test atom.
  defp trace_test_name(ExUnit.Test[name: name]) do
    case atom_to_binary(name) do
      "test_" <> rest -> rest
      "test " <> rest -> rest
      rest -> rest
    end
  end
  defp trace_test_time(ExUnit.Test[time: time]) do
    "#{format_us(time)}ms"
  end
  # Format a microsecond count as milliseconds with one decimal place
  # (values under 0.1ms render as "0.0N").
  defp format_us(us) do
    us = div(us, 10)
    if us < 10 do
      "0.0#{us}"
    else
      us = div us, 10
      "#{div(us, 10)}.#{rem(us, 10)}"
    end
  end
  # Final summary: timing, counts, and the seed used for test ordering.
  # Colored red if anything failed, yellow if anything was invalid.
  defp print_suite(config = Config[], run_us, load_us) do
    IO.write "\n\n"
    IO.puts format_time(run_us, load_us)
    message = "#{config.tests_counter} tests, #{config.failures_counter} failures"
    if config.invalids_counter > 0 do
      message = message <>  ", #{config.invalids_counter} invalid"
    end
    cond do
      config.failures_counter > 0 -> IO.puts failure(message, config)
      config.invalids_counter > 0 -> IO.puts invalid(message, config)
      true                        -> IO.puts success(message, config)
    end
    IO.puts "\nRandomized with seed #{config.seed}"
  end
  defp print_filters([include: [], exclude: []]) do
    :ok
  end
  defp print_filters([include: include, exclude: exclude]) do
    if include != [], do: IO.puts format_filters(include, :include)
    if exclude != [], do: IO.puts format_filters(exclude, :exclude)
    IO.puts("")
    :ok
  end
  defp print_test_failure(ExUnit.Test[name: name, case: mod, state: { :failed, tuple }], config) do
    formatted = format_test_failure(mod, name, tuple, config.failures_counter + 1, &formatter(&1, &2, config))
    print_any_failure formatted, config
  end
  defp print_test_case_failure(ExUnit.TestCase[name: name, state: { :failed, tuple }], config) do
    formatted = format_test_case_failure(name, tuple, config.failures_counter + 1, &formatter(&1, &2, config))
    print_any_failure formatted, config
  end
  # Print a formatted failure, inserting blank lines so it is visually
  # separated from the progress dots (unless the previous test also failed).
  defp print_any_failure(formatted, config = Config[]) do
    cond do
      config.trace -> IO.puts ""
      config.previous != :failed -> IO.puts "\n"
      true -> :ok
    end
    IO.puts formatted
    config.update_failures_counter(&(&1 + 1)).previous(:failed)
  end
  # Color styles
  # Wrap `string` in ANSI escapes (legacy IO.ANSI.escape_fragment API);
  # escapes are omitted when config.color is false.
  defp colorize(escape, string, Config[color: color]) do
    IO.ANSI.escape_fragment("%{#{escape}}", color)
      <> string
      <> IO.ANSI.escape_fragment("%{reset}", color)
  end
  defp success(msg, config) do
    colorize("green", msg, config)
  end
  defp invalid(msg, config) do
    colorize("yellow", msg, config)
  end
  defp failure(msg, config) do
    colorize("red", msg, config)
  end
  # Style hooks passed into ExUnit.Formatter for failure rendering.
  defp formatter(:error_info, msg, config),    do: colorize("red", msg, config)
  defp formatter(:location_info, msg, config), do: colorize("cyan", msg, config)
  defp formatter(_,  msg, _config),            do: msg
end
1c96a59936ffbdc168b75559e1e57f75f68916b8 | 63 | ex | Elixir | lib/sanbase_web/views/layout_view.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase_web/views/layout_view.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase_web/views/layout_view.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule SanbaseWeb.LayoutView do
use SanbaseWeb, :view
end
| 15.75 | 34 | 0.809524 |
1c96a9a65528eb8552d0a04dbf07896ca70f03dc | 3,123 | exs | Elixir | test/groupher_server/accounts/published/published_repos_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | test/groupher_server/accounts/published/published_repos_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | test/groupher_server/accounts/published/published_repos_test.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Test.Accounts.Published.Repo do
use GroupherServer.TestTools
alias GroupherServer.{Accounts, CMS}
alias Accounts.Model.User
alias Helper.ORM
@publish_count 10
setup do
{:ok, user} = db_insert(:user)
{:ok, user2} = db_insert(:user)
{:ok, repo} = db_insert(:repo)
{:ok, community} = db_insert(:community)
{:ok, community2} = db_insert(:community)
{:ok, ~m(user user2 repo community community2)a}
end
describe "[publised repos]" do
test "create repo should update user published meta", ~m(community user)a do
repo_attrs = mock_attrs(:repo, %{community_id: community.id})
{:ok, _repo} = CMS.create_article(community, :repo, repo_attrs, user)
{:ok, _repo} = CMS.create_article(community, :repo, repo_attrs, user)
{:ok, user} = ORM.find(User, user.id)
assert user.meta.published_repos_count == 2
end
test "fresh user get empty paged published repos", ~m(user)a do
{:ok, results} = Accounts.paged_published_articles(user, :repo, %{page: 1, size: 20})
assert results |> is_valid_pagination?(:raw)
assert results.total_count == 0
end
test "user can get paged published repos", ~m(user user2 community community2)a do
pub_repos =
Enum.reduce(1..@publish_count, [], fn _, acc ->
repo_attrs = mock_attrs(:repo, %{community_id: community.id})
{:ok, repo} = CMS.create_article(community, :repo, repo_attrs, user)
acc ++ [repo]
end)
pub_repos2 =
Enum.reduce(1..@publish_count, [], fn _, acc ->
repo_attrs = mock_attrs(:repo, %{community_id: community2.id})
{:ok, repo} = CMS.create_article(community, :repo, repo_attrs, user)
acc ++ [repo]
end)
# unrelated other user
Enum.reduce(1..5, [], fn _, acc ->
repo_attrs = mock_attrs(:repo, %{community_id: community.id})
{:ok, repo} = CMS.create_article(community, :repo, repo_attrs, user2)
acc ++ [repo]
end)
{:ok, results} = Accounts.paged_published_articles(user, :repo, %{page: 1, size: 20})
assert results |> is_valid_pagination?(:raw)
assert results.total_count == @publish_count * 2
random_repo_id = pub_repos |> Enum.random() |> Map.get(:id)
random_repo_id2 = pub_repos2 |> Enum.random() |> Map.get(:id)
assert results.entries |> Enum.any?(&(&1.id == random_repo_id))
assert results.entries |> Enum.any?(&(&1.id == random_repo_id2))
end
end
describe "[publised repo comments]" do
test "can get published article comments", ~m(repo user)a do
total_count = 10
Enum.reduce(1..total_count, [], fn _, acc ->
{:ok, comment} = CMS.create_comment(:repo, repo.id, mock_comment(), user)
acc ++ [comment]
end)
filter = %{page: 1, size: 20}
{:ok, articles} = Accounts.paged_published_comments(user, :repo, filter)
entries = articles.entries
article = entries |> List.first()
assert article.article.id == repo.id
assert article.article.title == repo.title
end
end
end
| 33.223404 | 91 | 0.631124 |
1c96a9c91874f0916c5560f138ee5c6931b44119 | 919 | ex | Elixir | lib/app_web/router.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | lib/app_web/router.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | lib/app_web/router.ex | mzgajner/smena | 6c0243ae1e8d1cef6e8a8e240f0f6b703ea638c9 | [
"Unlicense"
] | null | null | null | defmodule SmenaWeb.Router do
use SmenaWeb, :router
pipeline :api do
plug :accepts, ["json"]
end
scope "/", SmenaWeb do
get "/", IndexController, :self
end
scope "/api", SmenaWeb do
pipe_through :api
resources "/punches", PunchController, only: [:index, :create]
end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through [:fetch_session, :protect_from_forgery]
live_dashboard "/dashboard", metrics: SmenaWeb.Telemetry
end
end
end
| 27.029412 | 70 | 0.700762 |
1c96c5f8443c90f1bef00cdeb1b4f6091f958995 | 1,580 | exs | Elixir | myApp/mix.exs | CaptainAwesomeDi/unnamedproject | 1b2bbdbc9774a073e70eb8fcd255339d7a36df70 | [
"MIT"
] | null | null | null | myApp/mix.exs | CaptainAwesomeDi/unnamedproject | 1b2bbdbc9774a073e70eb8fcd255339d7a36df70 | [
"MIT"
] | null | null | null | myApp/mix.exs | CaptainAwesomeDi/unnamedproject | 1b2bbdbc9774a073e70eb8fcd255339d7a36df70 | [
"MIT"
] | null | null | null | defmodule MyApp.Mixfile do
use Mix.Project
def project do
[
app: :myApp,
version: "0.0.1",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {MyApp.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.2"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 25.901639 | 79 | 0.582278 |
1c96d866cc8e3c9d65bd8996cebd51e8f702ac0f | 674 | exs | Elixir | config/test.exs | Foxlabsdevelopers/cv_creator | c77d52cdc67180ed369a3dbd298ca2dea5131c60 | [
"MIT"
] | null | null | null | config/test.exs | Foxlabsdevelopers/cv_creator | c77d52cdc67180ed369a3dbd298ca2dea5131c60 | [
"MIT"
] | 2 | 2021-05-24T21:41:25.000Z | 2021-05-25T16:02:09.000Z | config/test.exs | Foxlabsdevelopers/cv_creator | c77d52cdc67180ed369a3dbd298ca2dea5131c60 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
config :cv_creator, CvCreator.Repo,
username: "postgres",
password: "postgres",
database: "cv_creator_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :cv_creator, CvCreatorWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 29.304348 | 69 | 0.753709 |
1c96da1a0c3558e7563e10fda6f283e772bc7c57 | 990 | ex | Elixir | test/support/conn_case.ex | billstclair/readerl | bd81d17109a1fb207d98f1ba1c498f66b195d1ba | [
"BSD-3-Clause"
] | 3 | 2015-10-31T02:33:29.000Z | 2016-07-19T06:25:03.000Z | test/support/conn_case.ex | billstclair/readerl | bd81d17109a1fb207d98f1ba1c498f66b195d1ba | [
"BSD-3-Clause"
] | null | null | null | test/support/conn_case.ex | billstclair/readerl | bd81d17109a1fb207d98f1ba1c498f66b195d1ba | [
"BSD-3-Clause"
] | null | null | null | defmodule Readerl.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
alias Readerl.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
import Readerl.Router.Helpers
# The default endpoint for testing
@endpoint Readerl.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Readerl.Repo, [])
end
:ok
end
end
| 23.571429 | 66 | 0.706061 |
1c96ebeb231c9f94a8de1c3856223821d8908811 | 670 | ex | Elixir | lib/chat_api_web/views/group_message_view.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | 2 | 2021-04-30T08:30:26.000Z | 2021-04-30T16:20:39.000Z | lib/chat_api_web/views/group_message_view.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | null | null | null | lib/chat_api_web/views/group_message_view.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | null | null | null | defmodule ChatApiWeb.GroupMessageView do
use ChatApiWeb, :view
alias ChatApiWeb.{GroupMessageView, FormatHelpers}
def render("show.json", %{group_message: group_message}) do
%{group_message: render_one(group_message, GroupMessageView, "group_message.json")}
end
def render("group_message.json", %{group_message: group_message}) do
group_message
|> Map.from_struct()
|> Map.take([
:id,
:message,
:group_id,
:user_id
])
|> FormatHelpers.camelize()
end
defp datetime_to_iso8601(datetime) do
datetime
|> Map.put(:microsecond, {elem(datetime.microsecond, 0), 3})
|> DateTime.to_iso8601()
end
end
| 24.814815 | 87 | 0.683582 |
1c96f373e183ec31f8fa413b85e966410c8e7b9b | 1,134 | exs | Elixir | elixir/config/config.exs | XelaRellum/old_password | b461941069bc7f1187776a992f86c89317ab215e | [
"MIT"
] | null | null | null | elixir/config/config.exs | XelaRellum/old_password | b461941069bc7f1187776a992f86c89317ab215e | [
"MIT"
] | null | null | null | elixir/config/config.exs | XelaRellum/old_password | b461941069bc7f1187776a992f86c89317ab215e | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
import Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :old_password, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:old_password, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.580645 | 73 | 0.753086 |
1c9737a86a4c7e698fa5db110c8e37f8c7052773 | 118 | exs | Elixir | test/faker/vehicle_test.exs | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 540 | 2015-01-05T16:31:49.000Z | 2019-09-25T00:40:27.000Z | test/faker/vehicle_test.exs | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 172 | 2015-01-06T03:55:17.000Z | 2019-10-03T12:58:02.000Z | test/faker/vehicle_test.exs | joshillian/faker | eeede9d7c35c543dcf6abe72dc476e755c80415b | [
"MIT"
] | 163 | 2015-01-05T21:24:54.000Z | 2019-10-03T07:59:42.000Z | defmodule Faker.VehicleTest do
use ExUnit.Case, async: true
doctest Faker.Vehicle
doctest Faker.Vehicle.En
end
| 16.857143 | 30 | 0.779661 |
1c975082925cc9c23579882f88c1c48819919e5b | 2,392 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | dangthaison91/swagger-codegen-moya | d0f3a119f2af176cfad10a80c77dc9c77c9ed22f | [
"Apache-2.0"
] | 3 | 2017-09-07T16:18:04.000Z | 2020-11-05T02:20:50.000Z | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | dangthaison91/swagger-codegen-moya | d0f3a119f2af176cfad10a80c77dc9c77c9ed22f | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | dangthaison91/swagger-codegen-moya | d0f3a119f2af176cfad10a80c77dc9c77c9ed22f | [
"Apache-2.0"
] | 5 | 2017-05-01T15:47:40.000Z | 2021-12-29T03:23:06.000Z | defmodule SwaggerPetstore.Api.Fake do
@moduledoc """
Documentation for SwaggerPetstore.Api.Fake.
"""
use Tesla
plug Tesla.Middleware.BaseUrl, "http://petstore.swagger.io/v2"
plug Tesla.Middleware.JSON
def test_client_model(body) do
method = [method: :patch]
url = [url: "/fake"]
query_params = []
header_params = []
body_params = [body: body]
form_params = []
params = query_params ++ header_params ++ body_params ++ form_params
opts = []
options = method ++ url ++ params ++ opts
request(options)
end
def test_endpoint_parameters(number, double, pattern_without_delimiter, byte, integer, int32, int64, float, string, binary, date, date_time, password, callback) do
method = [method: :post]
url = [url: "/fake"]
query_params = []
header_params = []
body_params = []
form_params = [body: Enum.map_join([{:"integer", integer}, {:"int32", int32}, {:"int64", int64}, {:"number", number}, {:"float", float}, {:"double", double}, {:"string", string}, {:"pattern_without_delimiter", pattern_without_delimiter}, {:"byte", byte}, {:"binary", binary}, {:"date", date}, {:"dateTime", date_time}, {:"password", password}, {:"callback", callback}], "&", &("#{elem(&1, 0)}=#{elem(&1, 1)}"))]
params = query_params ++ header_params ++ body_params ++ form_params
opts = []
options = method ++ url ++ params ++ opts
request(options)
end
def test_enum_parameters(enum_form_string_array, enum_form_string, enum_header_string_array, enum_header_string, enum_query_string_array, enum_query_string, enum_query_integer, enum_query_double) do
method = [method: :get]
url = [url: "/fake"]
query_params = [query: [{:"enum_query_string_array", enum_query_string_array}, {:"enum_query_string", enum_query_string}, {:"enum_query_integer", enum_query_integer}]]
header_params = [header: [{:"enum_header_string_array", enum_header_string_array}, {:"enum_header_string", enum_header_string}]]
body_params = []
form_params = [body: Enum.map_join([{:"enum_form_string_array", enum_form_string_array}, {:"enum_form_string", enum_form_string}, {:"enum_query_double", enum_query_double}], "&", &("#{elem(&1, 0)}=#{elem(&1, 1)}"))]
params = query_params ++ header_params ++ body_params ++ form_params
opts = []
options = method ++ url ++ params ++ opts
request(options)
end
end
| 45.132075 | 415 | 0.674331 |
1c97541044363464af39b8619bd674899347a30d | 915 | ex | Elixir | lib/sendgrid/marketing_campaigns/contacts/recipient.ex | churcho/sendgrid_elixir | 1bb421d044e21e51bd761f14dfb61b9e431f2eb7 | [
"MIT"
] | 83 | 2016-02-28T22:41:34.000Z | 2022-03-29T12:20:53.000Z | lib/sendgrid/marketing_campaigns/contacts/recipient.ex | churcho/sendgrid_elixir | 1bb421d044e21e51bd761f14dfb61b9e431f2eb7 | [
"MIT"
] | 32 | 2016-07-05T14:11:11.000Z | 2021-12-15T05:39:58.000Z | lib/sendgrid/marketing_campaigns/contacts/recipient.ex | churcho/sendgrid_elixir | 1bb421d044e21e51bd761f14dfb61b9e431f2eb7 | [
"MIT"
] | 46 | 2016-02-29T12:39:21.000Z | 2021-06-22T13:55:14.000Z | defmodule SendGrid.Contacts.Recipient do
@moduledoc """
Struct to help with creating a recipient.
"""
alias SendGrid.Contacts.Recipient
@enforce_keys [:email]
defstruct [:custom_fields, :email]
@type t :: %Recipient{
email: String.t(),
custom_fields: nil | map()
}
@doc """
Builds a Repient to be used in `SendGrid.Contacts.Recipents`.
"""
@spec build(String.t(), map()) :: t()
def build(email, custom_fields \\ %{}) when is_map(custom_fields) do
%Recipient{
email: email,
custom_fields: custom_fields
}
end
defimpl Jason.Encoder do
def encode(%Recipient{email: email, custom_fields: fields}, opts) when is_map(fields) do
Jason.Encode.map(Map.merge(fields, %{email: email}), opts)
end
def encode(%Recipient{email: email, custom_fields: nil}, opts) do
Jason.Encode.map(%{email: email}, opts)
end
end
end
| 24.72973 | 92 | 0.645902 |
1c9754b200221142dc28a104e16c79c55c037446 | 2,977 | ex | Elixir | lib/earmark/helpers/ast_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/helpers/ast_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | lib/earmark/helpers/ast_helpers.ex | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | defmodule Earmark.Helpers.AstHelpers do
@moduledoc false
import Earmark.Helpers
import Earmark.Helpers.AttrParser
alias Earmark.Block
@doc false
def augment_tag_with_ial(tags, ial)
def augment_tag_with_ial([{t, a, c}|tags], atts) do
[{t, merge_attrs(a, atts), c}|tags]
end
@doc false
def code_classes(language, prefix) do
classes =
["" | String.split(prefix || "")]
|> Enum.map(fn pfx -> "#{pfx}#{language}" end)
{"class", classes |> Enum.join(" ")}
end
@doc false
def codespan(text) do
{ "code", [{"class", "inline"}], [text] }
end
@doc false
def render_footnote_link(ref, backref, number) do
{"a", [{"href", "##{ref}"}, {"id", backref}, {"class", "footnote"}, {"title", "see footnote"}], [to_string(number)]}
end
@doc false
def render_code(%Block.Code{lines: lines}) do
lines |> Enum.join("\n")
end
@remove_escapes ~r{ \\ (?! \\ ) }x
@doc false
def render_image(text, href, title) do
alt = text |> escape() |> String.replace(@remove_escapes, "")
if title do
{ "img", [{"src", href}, {"alt", alt}, {"title", title}], [] }
else
{ "img", [{"src", href}, {"alt", alt}], [] }
end
end
@doc false
def render_link(url, text), do: {"a", [{"href", url}], [text]}
##############################################
# add attributes to the outer tag in a block #
##############################################
@doc false
def merge_attrs(atts, default \\ %{})
def merge_attrs(nil, default) do
merge_attrs(%{}, default)
end
def merge_attrs(atts, new) when is_list(atts) do
atts
|> Enum.into(%{})
|> merge_attrs(new)
end
def merge_attrs(atts, new) do
atts
|> Map.merge(new, &_value_merger/3)
|> Enum.into([])
|> Enum.map(&attrs_to_string_keys/1)
end
@doc false
def add_attrs(context, text, attrs_as_string_or_map, default_attrs, lnb)
def add_attrs(context, text, nil, [], _lnb), do: {context, text}
def add_attrs(context, text, nil, default, lnb), do: add_attrs(context, text, %{}, default, lnb)
def add_attrs(context, text, attrs, default, lnb) when is_binary(attrs) do
{context1, attrs} = parse_attrs( context, attrs, lnb )
add_attrs(context1, text, attrs, default, lnb)
end
def add_attrs(_context, _text, attrs, default, _lnb) do
default
|> Map.new()
|> Map.merge(attrs, fn _k, v1, v2 -> v1 ++ v2 end)
end
defp attrs_to_string_keys(key_value_pair)
defp attrs_to_string_keys({k, vs}) when is_list(vs) do
{to_string(k), Enum.join(vs, " ")}
end
defp attrs_to_string_keys({k, vs}) do
{to_string(k),to_string(vs)}
end
defp _value_merger(key, val1, val2)
defp _value_merger(_, val1, val2) when is_list(val1) and is_list(val2) do
val1 ++ val2
end
defp _value_merger(_, val1, val2) when is_list(val1) do
val1 ++ [val2]
end
defp _value_merger(_, val1, val2) do
[val1, val2]
end
end
# SPDX-License-Identifier: Apache-2.0
| 26.580357 | 120 | 0.603628 |
1c9757ae0b25d78fd33e7b9971bce8a3c0c20667 | 2,254 | ex | Elixir | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_circuit_routes_table.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_circuit_routes_table.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Network/lib/microsoft/azure/management/network/api/express_route_circuit_routes_table.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Network.Api.ExpressRouteCircuitRoutesTable do
@moduledoc """
API calls for all endpoints tagged `ExpressRouteCircuitRoutesTable`.
"""
alias Microsoft.Azure.Management.Network.Connection
import Microsoft.Azure.Management.Network.RequestBuilder
@doc """
Gets the currently advertised routes table associated with the express route circuit in a resource group.
## Parameters
- connection (Microsoft.Azure.Management.Network.Connection): Connection to server
- resource_group_name (String.t): The name of the resource group.
- circuit_name (String.t): The name of the express route circuit.
- peering_name (String.t): The name of the peering.
- device_path (String.t): The path of the device.
- api_version (String.t): Client API version.
- subscription_id (String.t): The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
- opts (KeywordList): [optional] Optional parameters
## Returns
{:ok, %Microsoft.Azure.Management.Network.Model.ExpressRouteCircuitsRoutesTableListResult{}} on success
{:error, info} on failure
"""
@spec express_route_circuits_list_routes_table(Tesla.Env.client, String.t, String.t, String.t, String.t, String.t, String.t, keyword()) :: {:ok, Microsoft.Azure.Management.Network.Model.ExpressRouteCircuitsRoutesTableListResult.t} | {:error, Tesla.Env.t}
def express_route_circuits_list_routes_table(connection, resource_group_name, circuit_name, peering_name, device_path, api_version, subscription_id, _opts \\ []) do
%{}
|> method(:post)
|> url("/subscriptions/#{subscription_id}/resourceGroups/#{resource_group_name}/providers/Microsoft.Network/expressRouteCircuits/#{circuit_name}/peerings/#{peering_name}/routeTables/#{device_path}")
|> add_param(:query, :"api-version", api_version)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%Microsoft.Azure.Management.Network.Model.ExpressRouteCircuitsRoutesTableListResult{})
end
end
| 51.227273 | 256 | 0.765306 |
1c9765e3b092cfc381ac4695d47e69e03b18595a | 17,303 | ex | Elixir | lib/mix/lib/mix/tasks/format.ex | NJichev/elixir | aef81d1aadfd9522ab4efb6d04103a73584ac9a1 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/mix/lib/mix/tasks/format.ex | NJichev/elixir | aef81d1aadfd9522ab4efb6d04103a73584ac9a1 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/format.ex | NJichev/elixir | aef81d1aadfd9522ab4efb6d04103a73584ac9a1 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Format do
use Mix.Task
@shortdoc "Formats the given files/patterns"
@moduledoc """
Formats the given files and patterns.
mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}"
If any of the files is `-`, then the output is read from stdin
and written to stdout.
## Formatting options
The formatter will read a `.formatter.exs` in the current directory for
formatter configuration. Evaluating this file should return a keyword list.
Here is an example `.formatter.exs` that works as a starting point:
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]
Besides the options listed in `Code.format_string!/2`, the `.formatter.exs`
supports the following options:
* `:inputs` (a list of paths and patterns) - specifies the default inputs
to be used by this task. For example, `["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]`.
Patterns are expanded with `Path.wildcard/2`.
* `:subdirectories` (a list of paths and patterns) - specifies subdirectories
that have their own formatting rules. Each subdirectory should have a
`.formatter.exs` that configures how entries in that subdirectory should be
formatted as. Configuration between `.formatter.exs` are not shared nor
inherited. If a `.formatter.exs` lists "lib/app" as a subdirectory, the rules
in `.formatter.exs` won't be available in `lib/app/.formatter.exs`.
Note that the parent `.formatter.exs` must not specify files inside the "lib/app"
subdirectory in its `:inputs` configuration. If this happens, the behaviour of
which formatter configuration will be picked is unspecified.
* `:import_deps` (a list of dependencies as atoms) - specifies a list
of dependencies whose formatter configuration will be imported.
When specified, the formatter should run in the same directory as
the `mix.exs` file that defines those dependencies. See the "Importing
dependencies configuration" section below for more information.
* `:export` (a keyword list) - specifies formatter configuration to be exported.
See the "Importing dependencies configuration" section below.
## Task-specific options
* `--check-formatted` - checks that the file is already formatted.
This is useful in pre-commit hooks and CI scripts if you want to
reject contributions with unformatted code. However keep in mind
that the formatted output may differ between Elixir versions as
improvements and fixes are applied to the formatter.
* `--check-equivalent` - checks if the files after formatting have the
same AST as before formatting. If the ASTs are not equivalent, it is
a bug in the code formatter. This option is useful if you suspect you
  have run into a formatter bug and you would like confirmation.
* `--dry-run` - does not save files after formatting.
* `--dot-formatter` - path to the file with formatter configuration.
Defaults to `.formatter.exs` if one is available. See the "`.formatter.exs`"
section for more information.
If any of the `--check-*` flags are given and a check fails, the formatted
contents won't be written to disk nor printed to standard output.
## When to format code
We recommend developers to format code directly in their editors, either
automatically when saving a file or via an explicit command or key binding. If
such option is not yet available in your editor of choice, adding the required
integration is usually a matter of invoking:
cd $project && mix format $file
where `$file` refers to the current file and `$project` is the root of your
project.
It is also possible to format code across the whole project by passing a list
of patterns and files to `mix format`, as shown at the top of this task
documentation. This list can also be set in the `.formatter.exs` under the
`:inputs` key.
## Importing dependencies configuration
This task supports importing formatter configuration from dependencies.
A dependency that wants to export formatter configuration needs to have a
`.formatter.exs` file at the root of the project. In this file, the dependency
can export a `:export` option with configuration to export. For now, only one
option is supported under `:export`: `:locals_without_parens` (whose value has
the same shape as the value of the `:locals_without_parens` in `Code.format_string!/2`).
The functions listed under `:locals_without_parens` in the `:export` option of
a dependency can be imported in a project by listing that dependency in the
`:import_deps` option of the formatter configuration file of the project.
For example, consider I have a project `my_app` that depends on `my_dep`.
`my_dep` wants to export some configuration, so `my_dep/.formatter.exs`
would look like this:
# my_dep/.formatter.exs
[
# Regular formatter configuration for my_dep
# ...
export: [
locals_without_parens: [some_dsl_call: 2, some_dsl_call: 3]
]
]
In order to import configuration, `my_app`'s `.formatter.exs` would look like
this:
# my_app/.formatter.exs
[
import_deps: [:my_dep]
]
"""
@switches [
check_equivalent: :boolean,
check_formatted: :boolean,
dot_formatter: :string,
dry_run: :boolean
]
@manifest "cached_dot_formatter"
@manifest_vsn 1
  @impl true
  def run(args) do
    # Parse the task-specific switches; remaining args are files/patterns.
    {opts, args} = OptionParser.parse!(args, strict: @switches)
    {dot_formatter, formatter_opts} = eval_dot_formatter(opts)
    # Resolve :import_deps / :subdirectories config (possibly cached).
    {formatter_opts_and_subs, _sources} =
      eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])
    # Format each file concurrently (30s per file), collect the per-file
    # statuses into a triple of result lists, then report/raise on failures.
    args
    |> expand_args(dot_formatter, formatter_opts_and_subs)
    |> Task.async_stream(&format_file(&1, opts), ordered: false, timeout: 30000)
    |> Enum.reduce({[], [], []}, &collect_status/2)
    |> check!()
  end
  @doc """
  Returns formatter options to be used for the given file.

  Resolves the `.formatter.exs` configuration (including imported deps and
  subdirectory configs) and picks the options that apply to `file` based on
  its path relative to the current working directory.
  """
  def formatter_opts_for_file(file, opts \\ []) do
    {dot_formatter, formatter_opts} = eval_dot_formatter(opts)
    {formatter_opts_and_subs, _sources} =
      eval_deps_and_subdirectories(dot_formatter, [], formatter_opts, [dot_formatter])
    # Walk the path segments to find the most specific subdirectory config.
    split = file |> Path.relative_to_cwd() |> Path.split()
    find_formatter_opts_for_file(split, formatter_opts_and_subs)
  end
defp eval_dot_formatter(opts) do
cond do
dot_formatter = opts[:dot_formatter] ->
{dot_formatter, eval_file_with_keyword_list(dot_formatter)}
File.regular?(".formatter.exs") ->
{".formatter.exs", eval_file_with_keyword_list(".formatter.exs")}
true ->
{".formatter.exs", []}
end
end
  # This function reads exported configuration from the imported
  # dependencies and subdirectories and deals with caching the result
  # of reading such configuration in a manifest file.
  defp eval_deps_and_subdirectories(dot_formatter, prefix, formatter_opts, sources) do
    deps = Keyword.get(formatter_opts, :import_deps, [])
    subs = Keyword.get(formatter_opts, :subdirectories, [])
    # Both options must be lists; fail loudly on misconfiguration.
    if not is_list(deps) do
      Mix.raise("Expected :import_deps to return a list of dependencies, got: #{inspect(deps)}")
    end
    if not is_list(subs) do
      Mix.raise("Expected :subdirectories to return a list of directories, got: #{inspect(subs)}")
    end
    if deps == [] and subs == [] do
      # Nothing to import: no manifest involvement, no sub configs.
      {{formatter_opts, []}, sources}
    else
      manifest = Path.join(Mix.Project.manifest_path(), @manifest)
      # Resolving deps/subdirs is expensive, so cache it in the manifest
      # when possible (see maybe_cache_in_manifest/3).
      maybe_cache_in_manifest(dot_formatter, manifest, fn ->
        {subdirectories, sources} = eval_subs_opts(subs, prefix, sources)
        {{eval_deps_opts(formatter_opts, deps), subdirectories}, sources}
      end)
    end
  end
  # Runs `fun` with manifest caching: caching only applies inside a Mix
  # project using the default ".formatter.exs"; otherwise `fun` runs
  # directly. A fresh manifest entry is reused; a stale/missing one is
  # recomputed and written back.
  defp maybe_cache_in_manifest(dot_formatter, manifest, fun) do
    cond do
      is_nil(Mix.Project.get()) or dot_formatter != ".formatter.exs" -> fun.()
      entry = read_manifest(manifest) -> entry
      true -> write_manifest!(manifest, fun.())
    end
  end
  # Reads a cached {entry, sources} pair from the manifest. Returns nil when
  # the manifest file is missing, was written by a different @manifest_vsn,
  # cannot be decoded, or is older than the project config or any of the
  # source files (globs) it was derived from.
  def read_manifest(manifest) do
    with {:ok, binary} <- File.read(manifest),
         {:ok, {@manifest_vsn, entry, sources}} <- safe_binary_to_term(binary),
         expanded_sources = Enum.flat_map(sources, &Path.wildcard(&1, match_dot: true)),
         false <- Mix.Utils.stale?([Mix.Project.config_mtime() | expanded_sources], [manifest]) do
      {entry, sources}
    else
      _ -> nil
    end
  end
defp safe_binary_to_term(binary) do
{:ok, :erlang.binary_to_term(binary)}
rescue
_ -> :error
end
  # Serializes {entry, sources} (tagged with @manifest_vsn) into the manifest
  # file, creating parent directories as needed. Returns the pair unchanged so
  # the caller can use it directly as the cache value.
  defp write_manifest!(manifest, {entry, sources}) do
    File.mkdir_p!(Path.dirname(manifest))
    File.write!(manifest, :erlang.term_to_binary({@manifest_vsn, entry, sources}))
    {entry, sources}
  end
  # No dependencies to import: nothing to merge.
  defp eval_deps_opts(formatter_opts, []) do
    formatter_opts
  end
  # Collects the :locals_without_parens exported by each imported dependency's
  # .formatter.exs and merges them into this project's formatter options.
  defp eval_deps_opts(formatter_opts, deps) do
    deps_paths = Mix.Project.deps_paths()
    parenless_calls =
      for dep <- deps,
          dep_path = assert_valid_dep_and_fetch_path(dep, deps_paths),
          dep_dot_formatter = Path.join(dep_path, ".formatter.exs"),
          # Dependencies without a .formatter.exs are silently skipped.
          File.regular?(dep_dot_formatter),
          dep_opts = eval_file_with_keyword_list(dep_dot_formatter),
          parenless_call <- dep_opts[:export][:locals_without_parens] || [],
          uniq: true,
          do: parenless_call
    # Append imported entries to any :locals_without_parens already set.
    Keyword.update(
      formatter_opts,
      :locals_without_parens,
      parenless_calls,
      &(&1 ++ parenless_calls)
    )
  end
  # Expands the :subdirectories globs and recursively loads each matched
  # subdirectory's .formatter.exs. Returns {[{sub, {opts, nested_subs}}], sources}
  # where sources accumulates every .formatter.exs path/glob consulted, for
  # cache invalidation.
  defp eval_subs_opts(subs, prefix, sources) do
    {subs, sources} =
      Enum.flat_map_reduce(subs, sources, fn sub, sources ->
        prefix = Path.join(prefix ++ [sub])
        # The glob is registered as a source even if it matches nothing today,
        # so newly created subdirectories invalidate the cache.
        {Path.wildcard(prefix), [Path.join(prefix, ".formatter.exs") | sources]}
      end)
    Enum.flat_map_reduce(subs, sources, fn sub, sources ->
      sub_formatter = Path.join(sub, ".formatter.exs")
      if File.exists?(sub_formatter) do
        formatter_opts = eval_file_with_keyword_list(sub_formatter)
        # :in_memory is not ".formatter.exs", so maybe_cache_in_manifest/3
        # will not cache configuration coming from subdirectories.
        {formatter_opts_and_subs, sources} =
          eval_deps_and_subdirectories(:in_memory, [sub], formatter_opts, sources)
        {[{sub, formatter_opts_and_subs}], sources}
      else
        {[], sources}
      end
    end)
  end
  # Resolves a dependency name to its path on disk, raising a descriptive
  # error when the dependency is not fetched or not known to the project.
  defp assert_valid_dep_and_fetch_path(dep, deps_paths) when is_atom(dep) do
    case Map.fetch(deps_paths, dep) do
      {:ok, path} ->
        if File.dir?(path) do
          path
        else
          # Listed in mix.exs but not present on disk (not yet fetched).
          Mix.raise(
            "Unavailable dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
              "The dependency cannot be found in the file system, please run \"mix deps.get\" and try again"
          )
        end
      :error ->
        Mix.raise(
          "Unknown dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
            "The dependency is not listed in your mix.exs for environment #{inspect(Mix.env())}"
        )
    end
  end
  # Anything other than an atom is a configuration error.
  defp assert_valid_dep_and_fetch_path(dep, _deps_paths) do
    Mix.raise("Dependencies in :import_deps should be atoms, got: #{inspect(dep)}")
  end
  # Evaluates a formatter configuration file (arbitrary Elixir code, by
  # design) and asserts that it returned a keyword list.
  defp eval_file_with_keyword_list(path) do
    {opts, _} = Code.eval_file(path)
    unless Keyword.keyword?(opts) do
      Mix.raise("Expected #{inspect(path)} to return a keyword list, got: #{inspect(opts)}")
    end
    opts
  end
  # No files/patterns on the command line: format everything declared by the
  # .formatter.exs tree, raising when it configures nothing at all.
  defp expand_args([], dot_formatter, formatter_opts_and_subs) do
    if no_entries_in_formatter_opts?(formatter_opts_and_subs) do
      Mix.raise(
        "Expected one or more files/patterns to be given to mix format " <>
          "or for a .formatter.exs to exist with an :inputs or :subdirectories key"
      )
    end
    dot_formatter
    |> expand_dot_inputs([], formatter_opts_and_subs, %{})
    |> Enum.map(fn {file, {_dot_formatter, formatter_opts}} -> {file, formatter_opts} end)
  end
  # Explicit files/patterns given: expand them (deduplicated) and pair each
  # file with the formatter options of the subdirectory it belongs to.
  defp expand_args(files_and_patterns, _dot_formatter, {formatter_opts, subs}) do
    files =
      for file_or_pattern <- files_and_patterns,
          file <- stdin_or_wildcard(file_or_pattern),
          uniq: true,
          do: file
    if files == [] do
      Mix.raise(
        "Could not find a file to format. The files/patterns given to command line " <>
          "did not point to any existing file. Got: #{inspect(files_and_patterns)}"
      )
    end
    for file <- files do
      if file == :stdin do
        # stdin has no path, so it always uses the root formatter options.
        {file, formatter_opts}
      else
        split = file |> Path.relative_to_cwd() |> Path.split()
        {file, find_formatter_opts_for_file(split, {formatter_opts, subs})}
      end
    end
  end
  # Expands every :inputs glob - at this level and, recursively, in each
  # subdirectory - into a map of absolute file path => {dot_formatter, opts}.
  # When two formatter files claim the same file, a warning is printed and the
  # later (deeper) configuration wins.
  defp expand_dot_inputs(dot_formatter, prefix, {formatter_opts, subs}, acc) do
    if no_entries_in_formatter_opts?({formatter_opts, subs}) do
      Mix.raise("Expected :inputs or :subdirectories key in #{dot_formatter}")
    end
    map =
      for input <- List.wrap(formatter_opts[:inputs]),
          file <- Path.wildcard(Path.join(prefix ++ [input]), match_dot: true),
          do: {expand_relative_to_cwd(file), {dot_formatter, formatter_opts}},
          into: %{}
    acc =
      Map.merge(acc, map, fn file, {dot_formatter1, _}, {dot_formatter2, formatter_opts} ->
        Mix.shell().error(
          "Both #{dot_formatter1} and #{dot_formatter2} specify the file " <>
            "#{Path.relative_to_cwd(file)} in their :inputs option. To resolve the " <>
            "conflict, the configuration in #{dot_formatter1} will be ignored. " <>
            "Please change the list of :inputs in one of the formatter files so only " <>
            "one of them matches #{Path.relative_to_cwd(file)}"
        )
        {dot_formatter2, formatter_opts}
      end)
    # Recurse into each configured subdirectory, accumulating into the map.
    Enum.reduce(subs, acc, fn {sub, formatter_opts_and_subs}, acc ->
      sub_formatter = Path.join(sub, ".formatter.exs")
      expand_dot_inputs(sub_formatter, [sub], formatter_opts_and_subs, acc)
    end)
  end
defp expand_relative_to_cwd(path) do
case File.cwd() do
{:ok, cwd} -> Path.expand(path, cwd)
_ -> path
end
end
defp find_formatter_opts_for_file(split, {formatter_opts, subs}) do
Enum.find_value(subs, formatter_opts, fn {sub, formatter_opts_and_subs} ->
if List.starts_with?(split, Path.split(sub)) do
find_formatter_opts_for_file(split, formatter_opts_and_subs)
end
end)
end
defp no_entries_in_formatter_opts?({formatter_opts, subs}) do
is_nil(formatter_opts[:inputs]) and subs == []
end
defp stdin_or_wildcard("-"), do: [:stdin]
defp stdin_or_wildcard(path), do: path |> Path.expand() |> Path.wildcard(match_dot: true)
  # Reads the whole of standard input; the `file: "stdin"` option is only used
  # by the formatter for error reporting.
  defp read_file(:stdin) do
    {IO.stream(:stdio, :line) |> Enum.to_list() |> IO.iodata_to_binary(), file: "stdin"}
  end
  # Reads a regular file, raising if it cannot be read.
  defp read_file(file) do
    {File.read!(file), file: file}
  end
  # Formats a single file (or stdin) and classifies the outcome for
  # collect_status/2: :ok, {:not_equivalent, file}, {:not_formatted, file}, or
  # {:exit, file, exception, stacktrace}. Exceptions are captured together
  # with their stacktrace so check!/1 can re-raise them with file context.
  defp format_file({file, formatter_opts}, task_opts) do
    {input, extra_opts} = read_file(file)
    # Formatter output always ends with a trailing newline.
    output = IO.iodata_to_binary([Code.format_string!(input, extra_opts ++ formatter_opts), ?\n])
    check_equivalent? = Keyword.get(task_opts, :check_equivalent, false)
    check_formatted? = Keyword.get(task_opts, :check_formatted, false)
    dry_run? = Keyword.get(task_opts, :dry_run, false)
    cond do
      check_equivalent? and not equivalent?(input, output) ->
        {:not_equivalent, file}
      check_formatted? ->
        # --check-formatted never writes; it only reports differences.
        if input == output, do: :ok, else: {:not_formatted, file}
      dry_run? ->
        :ok
      true ->
        write_or_print(file, input, output)
    end
  rescue
    exception ->
      {:exit, file, exception, __STACKTRACE__}
  end
  # stdin results go to stdout; files are rewritten only when the formatted
  # output actually differs, avoiding needless mtime changes.
  defp write_or_print(file, input, output) do
    cond do
      file == :stdin -> IO.write(output)
      input == output -> :ok
      true -> File.write!(file, output)
    end
    :ok
  end
defp collect_status({:ok, :ok}, acc), do: acc
defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_equivalent, not_formatted}) do
{[exit | exits], not_equivalent, not_formatted}
end
defp collect_status({:ok, {:not_equivalent, file}}, {exits, not_equivalent, not_formatted}) do
{exits, [file | not_equivalent], not_formatted}
end
defp collect_status({:ok, {:not_formatted, file}}, {exits, not_equivalent, not_formatted}) do
{exits, not_equivalent, [file | not_formatted]}
end
  # Every file formatted cleanly.
  defp check!({[], [], []}) do
    :ok
  end
  # At least one file crashed the formatter: report the file and re-raise the
  # original exception with its original stacktrace.
  defp check!({[{:exit, file, exception, stacktrace} | _], _not_equivalent, _not_formatted}) do
    Mix.shell().error("mix format failed for file: #{Path.relative_to_cwd(file)}")
    reraise exception, stacktrace
  end
  # Formatting changed the code's AST - always a formatter bug, so the message
  # asks the user to report it upstream.
  defp check!({_exits, [_ | _] = not_equivalent, _not_formatted}) do
    Mix.raise("""
    mix format failed due to --check-equivalent.
    The following files were not equivalent:
    #{to_bullet_list(not_equivalent)}
    Please report this bug with the input files at github.com/elixir-lang/elixir/issues
    """)
  end
  # Some files are not formatted according to the current configuration.
  defp check!({_exits, _not_equivalent, [_ | _] = not_formatted}) do
    Mix.raise("""
    mix format failed due to --check-formatted.
    The following files were not formatted:
    #{to_bullet_list(not_formatted)}
    """)
  end
defp to_bullet_list(files) do
Enum.map_join(files, "\n", &" * #{&1}")
end
  # Delegates to the formatter's equivalence check: formatting must not have
  # changed the meaning of the code (anything other than :ok is a failure).
  defp equivalent?(input, output) do
    Code.Formatter.equivalent(input, output) == :ok
  end
end
| 34.399602 | 110 | 0.674392 |
1c979800173e7bd3f6c60a56423cb96c964d5751 | 12,920 | exs | Elixir | test/yesql_test.exs | zph/yesql | 08b3d83d01fa71da9e000896950d9a4c671e936a | [
"Apache-2.0"
] | null | null | null | test/yesql_test.exs | zph/yesql | 08b3d83d01fa71da9e000896950d9a4c671e936a | [
"Apache-2.0"
] | null | null | null | test/yesql_test.exs | zph/yesql | 08b3d83d01fa71da9e000896950d9a4c671e936a | [
"Apache-2.0"
] | 1 | 2020-02-04T17:48:37.000Z | 2020-02-04T17:48:37.000Z | defmodule YesqlTest do
use ExUnit.Case
doctest Yesql
import TestHelper
defmodule Query do
use Yesql, driver: Postgrex
Yesql.defquery("test/sql/select_older_cats.sql")
Yesql.defquery("test/sql/insert_cat.sql")
end
defmodule QueryConn do
use Yesql, driver: Postgrex, conn: YesqlTest.Postgrex
Yesql.defquery("test/sql/select_older_cats.sql")
Yesql.defquery("test/sql/insert_cat.sql")
end
defmodule QueryEcto do
use Yesql, driver: Ecto, conn: YesqlTest.Repo
Yesql.defquery("test/sql/select_older_cats.sql")
Yesql.defquery("test/sql/insert_cat.sql")
end
defmodule QueryMany do
use Yesql, driver: Postgrex
Yesql.defqueries("test/sql/two_queries.sql")
end
defmodule QueryManyConn do
use Yesql, driver: Postgrex, conn: YesqlTest.Postgrex
Yesql.defqueries("test/sql/two_queries.sql")
end
defmodule QueryManyEcto do
use Yesql, driver: Ecto, conn: YesqlTest.Repo
Yesql.defqueries("test/sql/two_queries.sql")
end
setup_all [:new_postgrex_connection, :create_cats_postgres_table]
describe "parse/1" do
import Yesql, only: [parse: 1]
test "simple tests" do
assert parse("SELECT * FROM person WHERE age > 18") ==
{:ok, "SELECT * FROM person WHERE age > 18", []}
assert parse("SELECT * FROM person WHERE age > :age") ==
{:ok, "SELECT * FROM person WHERE age > $1", [:age]}
assert parse("SELECT * FROM person WHERE :age > age") ==
{:ok, "SELECT * FROM person WHERE $1 > age", [:age]}
assert parse("SELECT 1 FROM dual") == {:ok, "SELECT 1 FROM dual", []}
assert parse("SELECT :value FROM dual") == {:ok, "SELECT $1 FROM dual", [:value]}
assert parse("SELECT 'test' FROM dual") == {:ok, "SELECT 'test' FROM dual", []}
assert parse("SELECT 'test'\nFROM dual") == {:ok, "SELECT 'test'\nFROM dual", []}
assert parse("SELECT :value, :other_value FROM dual") ==
{:ok, "SELECT $1, $2 FROM dual", [:value, :other_value]}
end
test "Tokenization rules" do
assert parse("SELECT :age-5 FROM dual") == {:ok, "SELECT $1-5 FROM dual", [:age]}
end
test "escapes" do
assert parse("SELECT :value, :other_value, ':not_a_value' FROM dual") ==
{:ok, "SELECT $1, $2, ':not_a_value' FROM dual", [:value, :other_value]}
assert parse(~S"SELECT 'not \' :a_value' FROM dual") ==
{:ok, ~S"SELECT 'not \' :a_value' FROM dual", []}
end
test "casting" do
assert parse("SELECT :value, :other_value, 5::text FROM dual") ==
{:ok, "SELECT $1, $2, 5::text FROM dual", [:value, :other_value]}
end
test "newlines are preserved" do
assert parse("SELECT :value, :other_value, 5::text\nFROM dual") ==
{:ok, "SELECT $1, $2, 5::text\nFROM dual", [:value, :other_value]}
end
test "complex 1" do
assert parse("SELECT :a+2*:b+age::int FROM users WHERE username = :name AND :b > 0") ==
{
:ok,
"SELECT $1+2*$2+age::int FROM users WHERE username = $3 AND $2 > 0",
[:a, :b, :name]
}
end
test "complex 2" do
assert parse("SELECT :value1 + :value2 + value3 + :value4 + :value1\nFROM SYSIBM.SYSDUMMY1") ==
{
:ok,
"SELECT $1 + $2 + value3 + $3 + $1\nFROM SYSIBM.SYSDUMMY1",
[:value1, :value2, :value4]
}
end
test "complex 3" do
assert parse("SELECT ARRAY [:value1] FROM dual") ==
{:ok, "SELECT ARRAY [$1] FROM dual", [:value1]}
end
end
describe "parse_many/1" do
import Yesql,
only: [parse_block: 1, parse_many: 1, parse: 1, split_to_blocks: 1]
test "simple tests" do
query = """
-- name: user-count
-- Counts all the users.
SELECT count(*) AS count FROM user
"""
two_queries = """
-- name: users-by-country
-- Counts the users in a given country.
-- Second line
SELECT count(*) AS count
FROM user
WHERE country_code = :country_code
-- name: user-count
-- Counts all the users.
SELECT count(*) AS count FROM user
"""
assert split_to_blocks(two_queries) |> Enum.count() == 2
assert split_to_blocks(two_queries) |> List.last() == """
-- name: user-count
-- Counts all the users.
SELECT count(*) AS count FROM user
"""
assert parse_block(query) == %{
name: :user_count,
description: "Counts all the users.",
sql: "SELECT count(*) AS count FROM user\n"
}
two_query_blocks = [
%{
description: "Counts the users in a given country.\nSecond line",
name: :users_by_country,
sql: "SELECT count(*) AS count\nFROM user\nWHERE country_code = :country_code\n"
},
%{
description: "Counts all the users.",
name: :user_count,
sql: "SELECT count(*) AS count FROM user\n"
}
]
two_query_parsed = [
%{
description: "Counts the users in a given country.\nSecond line",
name: :users_by_country,
sql: "SELECT count(*) AS count\nFROM user\nWHERE country_code = :country_code\n",
param_spec: [:country_code],
tokenized_sql: "SELECT count(*) AS count\nFROM user\nWHERE country_code = $1\n"
},
%{
description: "Counts all the users.",
name: :user_count,
sql: "SELECT count(*) AS count FROM user\n",
param_spec: [],
tokenized_sql: "SELECT count(*) AS count FROM user\n"
}
]
assert split_to_blocks(two_queries) |> Enum.map(&parse_block/1) == two_query_blocks
assert parse_many(two_queries) == {:ok, two_query_parsed}
result =
"test/sql/two_queries.sql"
|> File.read!()
|> Yesql.parse_many()
assert result ==
{:ok,
[
%{
description: "Important to find old cats",
name: :select_older_cats,
sql: "SELECT * FROM cats\nWHERE age > :age\nORDER BY age ASC\n",
param_spec: [:age],
tokenized_sql: "SELECT * FROM cats\nWHERE age > $1\nORDER BY age ASC\n"
},
%{
description: "",
name: :insert_cat,
sql: "INSERT INTO cats (age)\nVALUES (:age)\n",
param_spec: [:age],
tokenized_sql: "INSERT INTO cats (age)\nVALUES ($1)\n"
}
]}
end
end
describe "exec/4" do
setup [:truncate_postgres_cats]
test "unknown driver" do
assert_raise Yesql.UnknownDriver, "Unknown database driver Elixir.Boopatron\n", fn ->
Yesql.exec(self(), Boopatron, "", [], %{})
end
end
test "Postgrex insert", ctx do
sql = "INSERT INTO cats (age) VALUES ($1)"
assert {:ok, []} = Yesql.exec(ctx.postgrex, Postgrex, sql, [:age], %{age: 5})
end
test "Postgrex insert returning columns", ctx do
sql = "INSERT INTO cats (age) VALUES ($1), (10) RETURNING age"
assert Yesql.exec(ctx.postgrex, Postgrex, sql, [:age], %{age: 5}) ==
{:ok, [%{age: 5}, %{age: 10}]}
end
test "Postgrex select", ctx do
insert_sql = "INSERT INTO cats (age) VALUES ($1), (10)"
assert {:ok, []} = Yesql.exec(ctx.postgrex, Postgrex, insert_sql, [:age], %{age: 5})
sql = "SELECT * FROM cats"
assert {:ok, results} = Yesql.exec(ctx.postgrex, Postgrex, sql, [], %{})
assert results == [%{age: 5, name: nil}, %{age: 10, name: nil}]
end
test "Postgrex invalid insert", ctx do
insert_sql = "INSERT INTO cats (size) VALUES ($1), (10)"
assert {:error, error} = Yesql.exec(ctx.postgrex, Postgrex, insert_sql, [:age], %{age: 1})
assert error.postgres.message == "column \"size\" of relation \"cats\" does not exist"
end
end
describe "defqueries/2" do
setup [:truncate_postgres_cats]
test "query function is created" do
refute function_exported?(QueryMany, :select_older_cats, 1)
assert function_exported?(QueryMany, :select_older_cats, 2)
# The /1 arity function is called because conn isn't needed.
assert function_exported?(QueryManyConn, :select_older_cats, 1)
assert function_exported?(QueryManyConn, :select_older_cats, 2)
end
test "throws if map argument missing" do
assert_raise Yesql.MissingParam, "Required parameter `:age` not given\n", fn ->
QueryManyConn.select_older_cats(%{})
end
end
test "throws if keyword argument missing" do
assert_raise Yesql.MissingParam, "Required parameter `:age` not given\n", fn ->
QueryManyConn.select_older_cats(nil, [])
end
end
test "query exec with explicit conn", %{postgrex: conn} do
assert QueryMany.select_older_cats(conn, age: 5) == {:ok, []}
assert QueryMany.insert_cat(conn, age: 50) == {:ok, []}
assert QueryMany.select_older_cats(conn, age: 5) == {:ok, [%{age: 50, name: nil}]}
assert QueryMany.insert_cat(conn, age: 10) == {:ok, []}
assert QueryMany.select_older_cats(conn, age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
assert QueryMany.insert_cat(conn, age: 1) == {:ok, []}
assert QueryMany.select_older_cats(conn, age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
end
test "query exec with implicit conn" do
assert QueryManyConn.select_older_cats(age: 5) == {:ok, []}
assert QueryManyConn.insert_cat(age: 50) == {:ok, []}
assert QueryManyConn.select_older_cats(age: 5) == {:ok, [%{age: 50, name: nil}]}
assert QueryManyConn.insert_cat(age: 10) == {:ok, []}
assert QueryManyConn.select_older_cats(age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
assert QueryManyConn.insert_cat(age: 1) == {:ok, []}
assert QueryManyConn.select_older_cats(age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
end
test "query exec with Ecto driver" do
assert QueryManyEcto.select_older_cats(age: 5) == {:ok, []}
assert QueryManyEcto.insert_cat(age: 50) == {:ok, []}
assert QueryManyEcto.select_older_cats(age: 5) == {:ok, [%{age: 50, name: nil}]}
end
end
describe "defquery/2" do
setup [:truncate_postgres_cats]
test "query function is created" do
refute function_exported?(Query, :select_older_cats, 1)
assert function_exported?(Query, :select_older_cats, 2)
# The /1 arity function is called because conn isn't needed.
assert function_exported?(QueryConn, :select_older_cats, 1)
assert function_exported?(QueryConn, :select_older_cats, 2)
end
test "throws if map argument missing" do
assert_raise Yesql.MissingParam, "Required parameter `:age` not given\n", fn ->
QueryConn.select_older_cats(%{})
end
end
test "throws if keyword argument missing" do
assert_raise Yesql.MissingParam, "Required parameter `:age` not given\n", fn ->
QueryConn.select_older_cats(nil, [])
end
end
test "query exec with explicit conn", %{postgrex: conn} do
assert Query.select_older_cats(conn, age: 5) == {:ok, []}
assert Query.insert_cat(conn, age: 50) == {:ok, []}
assert Query.select_older_cats(conn, age: 5) == {:ok, [%{age: 50, name: nil}]}
assert Query.insert_cat(conn, age: 10) == {:ok, []}
assert Query.select_older_cats(conn, age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
assert Query.insert_cat(conn, age: 1) == {:ok, []}
assert Query.select_older_cats(conn, age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
end
test "query exec with implicit conn" do
assert QueryConn.select_older_cats(age: 5) == {:ok, []}
assert QueryConn.insert_cat(age: 50) == {:ok, []}
assert QueryConn.select_older_cats(age: 5) == {:ok, [%{age: 50, name: nil}]}
assert QueryConn.insert_cat(age: 10) == {:ok, []}
assert QueryConn.select_older_cats(age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
assert QueryConn.insert_cat(age: 1) == {:ok, []}
assert QueryConn.select_older_cats(age: 5) ==
{:ok, [%{age: 10, name: nil}, %{age: 50, name: nil}]}
end
test "query exec with Ecto driver" do
assert QueryEcto.select_older_cats(age: 5) == {:ok, []}
assert QueryEcto.insert_cat(age: 50) == {:ok, []}
assert QueryEcto.select_older_cats(age: 5) == {:ok, [%{age: 50, name: nil}]}
end
end
end
| 35.300546 | 101 | 0.578096 |
1c97b65df9c13785a2f39032f26a0ab30ea17da4 | 567 | ex | Elixir | lib/ueberauth/strategy/line/api.ex | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/line/api.ex | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/line/api.ex | LucidModules/ueberauth_line | 686e41ddeef83ecaf55e21ca0bf0337645deb9c1 | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Line.Api do
@moduledoc """
TODO: this is a wrapper on the line actions like verify token, authorize requests etc
keeping all methods in the Line module reduces readability
to get profile, an openid permission is required
TODO: maybe separate library as LineApi?
- is there any behaviour for the Client modules, like PHP's PSR ClientInterface?
"""
alias Http.Client
def issue_access_token() do
Client.get()
end
def verify_access_token do
end
def verify_id_token do
end
def get_profile do
end
end
| 21 | 87 | 0.744268 |
1c97dcb5103b1d0e8083e77adb5cfb9505fa30a2 | 1,516 | exs | Elixir | apps/ello_core/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_core/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_core/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Core.Mixfile do
use Mix.Project
def project do
[app: :ello_core,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
elixirc_options: [warnings_as_errors: Mix.env == :test],
aliases: aliases(),
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
def elixirc_paths(:test), do: ["lib", "test/support"]
def elixirc_paths(_), do: ["lib"]
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger],
mod: {Ello.Core.Application, []}]
end
def aliases do
[
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
defp deps do
[
{:phoenix_ecto, "~> 4.0"},
{:postgrex, ">= 0.0.0"},
{:redix, "~> 0.10"},
{:jason, "~> 1.0"},
{:ex_machina, "~> 2.3"},
{:timex, "~> 3.0"},
{:ecto, "~> 3.0"},
{:ecto_sql, "~> 3.0"},
# 2019-05-07 - the 'newrelic' repo has out of date dependencies, disabling
# newrelic until we have bandwidth to update our code, maybe to new_relic
# {:newrelic_phoenix, github: "ello/newrelic_phoenix", branch: "master"},
]
end
end
| 27.563636 | 80 | 0.562005 |
1c982fb03f6b012a832a4800cba66766ed6b3cd4 | 1,854 | ex | Elixir | apps/ewallet/lib/ewallet/permissions/bouncer/scopes/membership_scope.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/lib/ewallet/permissions/bouncer/scopes/membership_scope.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/lib/ewallet/permissions/bouncer/scopes/membership_scope.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Bouncer.MembershipScope do
  @moduledoc """
  Permission scoping module for memberships.

  Translates an actor's global/account abilities into the widest membership
  queryable that actor is allowed to see (or `nil` for no access).
  """
  @behaviour EWallet.Bouncer.ScopeBehaviour
  import Ecto.Query
  alias EWallet.Bouncer.{Helper, Permission}
  alias EWalletDB.{Membership, User, Key}
  @spec scoped_query(EWallet.Bouncer.Permission.t()) ::
          EWalletDB.Membership | nil | Ecto.Query.t()
  def scoped_query(%Permission{
        actor: actor,
        global_abilities: global_abilities,
        account_abilities: account_abilities
      }) do
    # Global abilities take precedence; account abilities are the fallback.
    do_scoped_query(actor, global_abilities) || do_scoped_query(actor, account_abilities)
  end
  # :global ability - unrestricted: the whole Membership schema is queryable.
  defp do_scoped_query(_actor, %{memberships: :global}) do
    Membership
  end
  # :accounts ability - only memberships belonging to accounts the actor is
  # itself a member of (joined via Helper.query_with_membership_for/2).
  defp do_scoped_query(actor, %{memberships: :accounts}) do
    actor
    |> Helper.query_with_membership_for(Membership)
    |> where([m, actor_m], m.account_uuid == actor_m.account_uuid)
    |> distinct(true)
    |> select([m, actor_m], m)
  end
  # :self ability for an admin user - only that user's own memberships.
  defp do_scoped_query(%User{is_admin: true} = user, %{memberships: :self}) do
    where(Membership, [m], m.user_uuid == ^user.uuid)
  end
  # :self ability for an access key - only memberships attached to that key.
  defp do_scoped_query(%Key{} = key, %{memberships: :self}) do
    where(Membership, [m], m.key_uuid == ^key.uuid)
  end
  # No matching ability - nil signals "no access" to the caller.
  defp do_scoped_query(_, _) do
    nil
  end
end
1c98311b314129753e70ebe1234470dfdcd06b8d | 1,625 | ex | Elixir | lib/ElCloud/search/storage.ex | Sapfir0/elCloud | 3ef9e8d2400898250ee65d36870672e76006c263 | [
"Unlicense"
] | 2 | 2021-05-30T20:11:44.000Z | 2021-12-20T19:08:56.000Z | lib/ElCloud/search/storage.ex | Sapfir0/elCloud | 3ef9e8d2400898250ee65d36870672e76006c263 | [
"Unlicense"
] | null | null | null | lib/ElCloud/search/storage.ex | Sapfir0/elCloud | 3ef9e8d2400898250ee65d36870672e76006c263 | [
"Unlicense"
] | 1 | 2022-01-23T05:29:08.000Z | 2022-01-23T05:29:08.000Z | defmodule ElCloud.Search.Helper do
@moduledoc """
The Store context.
"""
import Ecto.Query, warn: false
@data_dir Application.get_env(:elCloud, ElCloudWeb.FileStorageController)[:data_dir]
@indexes_file Application.get_env(:elCloud, ElCloud.Search.Helper)[:indexes_file]
@spec not_indexed_search(String.t(), String.t()) :: [String.t()]
def not_indexed_search(directory, query_filename) do
recursive_search(query_filename, directory)
end
@spec indexed_search(String.t(), String.t()) :: [String.t()]
def indexed_search(directory, query_filename) do
File.read!(@indexes_file)
|> Poison.decode!()
|> List.flatten()
|> Enum.filter(fn file ->
String.match?(file["filename"], ~r/#{query_filename}/)
end)
end
def create_indexes() do
File.write(@indexes_file, Poison.encode!(ElCloud.FileStorage.list_files_unsafe(@data_dir)), [:binary])
end
def recursive_search(query_filename, directory) do
File.ls!(directory)
|> Enum.map(fn file -> find_in_data_dir(directory, file, query_filename) end)
|> List.flatten()
|> Enum.filter(fn file -> String.match?(file.filename, ~r/#{query_filename}/) end)
end
def find_in_data_dir(directory, filename, query_filename) do
full_path = Path.join(directory, filename)
is_folder = File.dir?(full_path)
if is_folder, do: recursive_search(query_filename, full_path), else: %{
:is_folder => is_folder,
:filename => filename,
:path => full_path
} # создаем объект для всех элементов, а не только для тех, кто удовлетворяет условию, так что этот код не оч
end
end
| 31.25 | 113 | 0.695385 |
1c983328ef58d3105a1c97d193ecee9d34f93ad3 | 264 | exs | Elixir | priv/repo/migrations/20200211155547_add_spotify_to_podcasts.exs | gustavoarmoa/changelog.com | e898a9979a237ae66962714821ed8633a4966f37 | [
"MIT"
] | 2,599 | 2016-10-25T15:02:53.000Z | 2022-03-26T02:34:42.000Z | priv/repo/migrations/20200211155547_add_spotify_to_podcasts.exs | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | 253 | 2016-10-25T20:29:24.000Z | 2022-03-29T21:52:36.000Z | priv/repo/migrations/20200211155547_add_spotify_to_podcasts.exs | type1fool/changelog.com | fbec3528cc3f5adfdc75b008bb92b17efc4f248f | [
"MIT"
] | 298 | 2016-10-25T15:18:31.000Z | 2022-01-18T21:25:52.000Z | defmodule Changelog.Repo.Migrations.AddSpotifyToPodcasts do
use Ecto.Migration
def change do
alter table(:podcasts) do
add(:spotify_url, :string)
add(:welcome, :text)
end
rename table(:podcasts), :itunes_url, to: :apple_url
end
end
| 20.307692 | 59 | 0.69697 |
1c98849c804cc77e4b56f509510349ac4b65380c | 1,469 | ex | Elixir | lib/honu/attachments/attachment.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | 1 | 2021-08-08T10:33:42.000Z | 2021-08-08T10:33:42.000Z | lib/honu/attachments/attachment.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | null | null | null | lib/honu/attachments/attachment.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | null | null | null | defmodule Honu.Attachments.Attachment do
import Ecto.Changeset
alias Honu.Attachments.AttachmentMap
def attachments_changeset(changeset, attrs, attachments_names) do
# TODO: Support both atom and string in map
# Currently only string keys are supported
Enum.reduce(attachments_names, changeset, fn {name, func}, cset ->
attachment_changeset(cset, attrs, {to_string(name), func})
end)
end
defp attachment_changeset(changeset, attrs, {attachment_name, changeset_func})
when is_function(changeset_func, 2) do
if upload = attrs[attachment_name] do
attachments = attachments_attrs(attachment_name, upload)
changeset
|> cast(Map.put(attrs, attachment_name, attachments), [])
|> cast_assoc(String.to_atom(attachment_name), with: changeset_func)
else
changeset
end
end
defp attachments_attrs(attachment_name, attachments) when is_list(attachments) do
Enum.reduce(attachments, [], fn upload, l ->
# upload :: Plug.Upload.t() | map()
[AttachmentMap.build(upload, attachment_name) | l]
end)
end
defp attachments_attrs(attachment_name, attachment) when is_map(attachment) do
AttachmentMap.build(attachment, attachment_name)
# Enum.reduce(Map.keys(attachments), %{}, fn key, m ->
# # upload :: Plug.Upload.t() | map()
# upload = attachments[key]
# Map.merge(m, %{key => AttachmentMap.build(upload, attachment_name)})
# end)
end
end
| 34.162791 | 83 | 0.701157 |
1c98ac6b915f40f9a8f1db9dc427845af1b138ed | 1,151 | exs | Elixir | Microsoft.Azure.Management.Containers/config/config.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Containers/config/config.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Containers/config/config.exs | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :container_service_client, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:container_service_client, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 37.129032 | 73 | 0.758471 |
1c98cb481430fb6fbaf064a14f35299b73f42d6f | 1,931 | ex | Elixir | lib/stellar/trades.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 25 | 2018-01-23T13:56:28.000Z | 2021-11-08T08:10:53.000Z | lib/stellar/trades.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 91 | 2018-01-30T20:10:44.000Z | 2022-01-12T19:50:24.000Z | lib/stellar/trades.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 5 | 2018-04-17T15:08:26.000Z | 2019-08-07T19:08:49.000Z | defmodule Stellar.Trades do
@moduledoc """
Functions for interacting with Trades
"""
alias Stellar.Base
@doc """
Returns all trades
optional `params` can take any of the following.:
* `base_asset_type`: Type of base asset.
* `base_asset_code`: Code of base asset, not required if type is `native`.
* `base_asset_issuer`: Issuer of base asset, not required if type is `native`.
* `counter_asset_type`: Type of counter asset.
* `counter_asset_code`: Code of counter asset, not required if type is `native`.
* `counter_asset_issuer`: Issuer of counter asset, not required if type is `native`.
* `offer_id`: filter for by a specific offer id.
* `cursor`: A paging token, specifying where to start returning records from.
* `order`: The order in which to return rows, "asc" or "desc".
* `limit`: Maximum number of records to return.
"""
@spec all(Keyword.t()) :: {Stellar.status(), map}
def all(params \\ []) do
  # Serialize the options straight into the request path; an empty option
  # list yields an empty query string.
  Base.get("/trades#{Base.process_query_params(params)}")
end
@doc """
Returns all trades for given order book
optional `params` can take any of the following.:
* `selling_asset_code`: Code of the Asset being sold.
* `selling_asset_issuer`: Account ID of the issuer of the Asset being sold.
* `buying_asset_code`: Code of the Asset being bought.
* `buying_asset_issuer`: Account ID of the issuer of the Asset being bought.
* `limit`: Maximum number of records to return.
"""
@spec all_for_order_book(Stellar.asset_type(), Stellar.asset_type(), Keyword.t()) ::
{Stellar.status(), map}
def all_for_order_book(selling_asset_type, buying_asset_type, params \\ []) do
  # Fold the two required asset types into the caller's options before
  # serializing everything into the query string.
  query =
    params
    |> Keyword.put(:selling_asset_type, selling_asset_type)
    |> Keyword.put(:buying_asset_type, buying_asset_type)
    |> Base.process_query_params()

  Base.get("/order_book/trades#{query}")
end
end
| 29.707692 | 86 | 0.695495 |
1c98e28067a6489d9106a6b9d6a63bcce8f90144 | 1,172 | exs | Elixir | config/config.exs | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | 1 | 2019-05-07T22:44:45.000Z | 2019-05-07T22:44:45.000Z | config/config.exs | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | config/config.exs | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# NOTE(review): Mix.Config is deprecated since Elixir 1.9 in favor of the
# Config module (`import Config`) -- consider migrating once the project's
# minimum Elixir version allows it.
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
#     config :stixex, key: :value
#
# Default: do not enforce STIX vocabulary checks; presumably overridable
# per environment -- verify against the library's validation code.
config :stixex, enforce_vocabularies: false
# and access this configuration in your application as:
#
#     Application.get_env(:stixex, :key)
#
# You can also configure a third-party app:
#
#     config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env()}.exs"
| 35.515152 | 73 | 0.753413 |
1c98ecb766ecd017ac1fe10c7da5104174b0d769 | 6,536 | ex | Elixir | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/api/associationsessions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/api/associationsessions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/api/associationsessions.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdSenseHost.V41.Api.Associationsessions do
@moduledoc """
API calls for all endpoints tagged `Associationsessions`.
"""
alias GoogleApi.AdSenseHost.V41.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Create an association session for initiating an association with an AdSense user.
## Parameters
* `connection` (*type:* `GoogleApi.AdSenseHost.V41.Connection.t`) - Connection to server
* `product_code` (*type:* `list(String.t)`) - Products to associate with the user.
* `website_url` (*type:* `String.t`) - The URL of the user's hosted website.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:callbackUrl` (*type:* `String.t`) - The URL to redirect the user to once association is completed. It receives a token parameter that can then be used to retrieve the associated account.
* `:userLocale` (*type:* `String.t`) - The preferred locale of the user.
* `:websiteLocale` (*type:* `String.t`) - The locale of the user's hosted website.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSenseHost.V41.Model.AssociationSession{}}` on success
* `{:error, info}` on failure
"""
@spec adsensehost_associationsessions_start(
Tesla.Env.client(),
list(String.t()),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.AdSenseHost.V41.Model.AssociationSession.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def adsensehost_associationsessions_start(
      connection,
      product_code,
      website_url,
      optional_params \\ [],
      opts \\ []
    ) do
  # Maps each supported optional parameter to where it is placed in the
  # request; for this endpoint every one of them is a query-string parameter.
  optional_params_config = %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query,
    :callbackUrl => :query,
    :userLocale => :query,
    :websiteLocale => :query
  }

  # Build GET /associationsessions/start with the two required query
  # parameters plus any recognized optional ones.
  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/associationsessions/start", %{})
    |> Request.add_param(:query, :productCode, product_code)
    |> Request.add_param(:query, :websiteUrl, website_url)
    |> Request.add_optional_params(optional_params_config, optional_params)
    |> Request.library_version(@library_version)

  # Execute the call and decode the JSON body into an AssociationSession
  # struct (or return the error tuple from the connection layer).
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.AdSenseHost.V41.Model.AssociationSession{}])
end
@doc """
Verify an association session after the association callback returns from AdSense signup.
## Parameters
* `connection` (*type:* `GoogleApi.AdSenseHost.V41.Connection.t`) - Connection to server
* `token` (*type:* `String.t`) - The token returned to the association callback URL.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSenseHost.V41.Model.AssociationSession{}}` on success
* `{:error, info}` on failure
"""
@spec adsensehost_associationsessions_verify(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.AdSenseHost.V41.Model.AssociationSession.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def adsensehost_associationsessions_verify(connection, token, optional_params \\ [], opts \\ []) do
  # Every optional parameter this endpoint accepts travels in the query
  # string.
  query_param_locations = %{
    alt: :query,
    fields: :query,
    key: :query,
    oauth_token: :query,
    prettyPrint: :query,
    quotaUser: :query,
    userIp: :query
  }

  # GET /associationsessions/verify with the required callback token plus
  # any recognized optional parameters.
  base_request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/associationsessions/verify", %{})

  request =
    base_request
    |> Request.add_param(:query, :token, token)
    |> Request.add_optional_params(query_param_locations, optional_params)
    |> Request.library_version(@library_version)

  # Execute and decode the response body into an AssociationSession struct.
  connection
  |> Connection.execute(request)
  |> Response.decode(opts ++ [struct: %GoogleApi.AdSenseHost.V41.Model.AssociationSession{}])
end
| 42.167742 | 198 | 0.642442 |
1c994619782834e5c0525c3121add43721bb1f9c | 10,427 | exs | Elixir | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/test/ewallet/fetchers/amount_fetcher_test.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.AmountFetcherTest do
use EWallet.DBCase, async: true
import EWalletDB.Factory
alias EWallet.AmountFetcher
describe "fetch/3 with amount" do
test "returns error when passing amount and from_token_id/to_token_id" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"amount" => 0,
"from_token_id" => token_1.id,
"to_token_id" => token_2.id
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount` not allowed when exchanging values. Use `from_amount` and/or `to_amount`."}
end
test "sets the amount in from_amount and to_amount" do
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"amount" => 100
},
%{},
%{}
)
assert res == :ok
assert from == %{from_amount: 100}
assert to == %{to_amount: 100}
assert exchange == %{}
end
test "supports string integer" do
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"amount" => "100"
},
%{},
%{}
)
assert res == :ok
assert from == %{from_amount: 100}
assert to == %{to_amount: 100}
assert exchange == %{}
end
test "returns an error if amount is not an integer (float)" do
res =
AmountFetcher.fetch(
%{
"amount" => 100.2
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount` is not an integer: 100.2"}
end
test "returns an error if amount is not an integer (string)" do
res =
AmountFetcher.fetch(
%{
"amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
end
describe "fetch/3 with from_amount/to_amount" do
test "sets from_amount and to_amount when valid integer" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "support string integers" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => "100",
"to_amount" => "200"
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when sending nil to_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when sending nil to_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => 100,
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets from_amount only when not sending to_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => 100
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets from_amount only when not sending to_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => 100
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets to_amount only when sending nil from_amount with exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"from_amount" => nil,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "sets to_amount only when sending nil from_amount without exchange pair" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"from_amount" => nil,
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "sets to_amount only when not sending from_amount with exchange rate" do
token_1 = insert(:token)
token_2 = insert(:token)
pair = insert(:exchange_pair, from_token: token_1, to_token: token_2, rate: 2)
{res, from, to, exchange} =
AmountFetcher.fetch(
%{
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == :ok
assert from[:from_amount] == 100
assert to[:to_amount] == 200
assert exchange[:actual_rate] == 2
assert exchange[:calculated_at] != nil
assert exchange[:pair_uuid] == pair.uuid
end
test "returns an error when exchange pair is not found" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"to_amount" => 200
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res == {:error, :exchange_pair_not_found}
end
test "returns an error when sending invalid from_amount and to_amount" do
res =
AmountFetcher.fetch(
%{
"from_amount" => "fake",
"to_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String numbers are not valid numbers: 'fake, fake'."}
end
test "returns an error when sending invalid from_amount" do
res =
AmountFetcher.fetch(
%{
"from_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
test "returns an error when sending invalid to_amount" do
res =
AmountFetcher.fetch(
%{
"to_amount" => "fake"
},
%{},
%{}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. String number is not a valid number: 'fake'."}
end
test "returns an error when sending nil to_amount" do
token_1 = insert(:token)
token_2 = insert(:token)
res =
AmountFetcher.fetch(
%{
"to_amount" => nil
},
%{from_token: token_1},
%{to_token: token_2}
)
assert res ==
{:error, :invalid_parameter,
"Invalid parameter provided. `amount`, `from_amount` or `to_amount` is required."}
end
end
describe "fetch/3 with invalid params" do
  # Omitting `amount`, `from_amount` and `to_amount` entirely must be
  # rejected up front with a descriptive :invalid_parameter error.
  test "returns an error when sending nil to_amount" do
    res = AmountFetcher.fetch(%{}, %{}, %{})

    assert res ==
             {:error, :invalid_parameter,
              "Invalid parameter provided. `amount`, `from_amount` or `to_amount` is required."}
  end
end
end
| 27.295812 | 129 | 0.545603 |
1c996d3c0ccbe99d7cc147a63aee0d359388d40a | 13,887 | ex | Elixir | lib/wallaby/query.ex | braynm/wallaby | bdc7716285ad657dca0847144213ed98fff3a63c | [
"MIT"
] | 926 | 2016-03-28T17:01:54.000Z | 2019-11-05T11:59:58.000Z | lib/wallaby/query.ex | braynm/wallaby | bdc7716285ad657dca0847144213ed98fff3a63c | [
"MIT"
] | 377 | 2016-03-17T00:35:56.000Z | 2019-11-03T07:15:24.000Z | lib/wallaby/query.ex | braynm/wallaby | bdc7716285ad657dca0847144213ed98fff3a63c | [
"MIT"
] | 144 | 2016-03-29T15:28:28.000Z | 2019-10-31T00:48:42.000Z | defmodule Wallaby.Query do
@moduledoc ~S"""
Provides the query DSL.
Queries are used to locate and retrieve DOM elements from a browser (see
`Wallaby.Browser`). You create queries like so:
```
Query.css(".some-css")
Query.xpath(".//input")
```
## Form elements
There are several custom finders for locating form elements. Each of these allows
finding by their name, id text, or label text. This allows for more robust querying
and decouples the query from presentation selectors like CSS classes.
```
Query.text_field("My Name")
Query.checkbox("Checkbox")
Query.select("A Fancy Select Box")
```
## Query Options
All of the query operations accept the following options:
- `:count` - The number of elements that should be found or `:any` (default: 1).
- If a `:minimum` or `:maximum` is specified, it defaults to `nil`.
- `:minimum` - The minimum number of elements that should be found, or `nil` (default: `nil`).
- `:maximum` - The maximum number of elements that should be found, or `nil` (default: `nil`).
- `:visible` - Determines if the query should return only visible elements (default: `true`).
- `:selected` - Determines if the query should return only selected elements (default: `:any`).
- `:text` - Text that should be found inside the element (default: `nil`).
- `:at` - The position (a number or `:all`) of the element to return if multiple elements satisfy the query. (default: `:all`)
Query options can also be set via functions by the same names:
```
Query.css(".names")
|> Query.visible(true)
|> Query.count(3)
```
## Re-using queries
It is often convenient to re-use queries. The easiest way is to use module
attributes:
```
@name_field Query.text_field("User Name")
@submit_button Query.button("Save")
```
If the queries need to be dynamic then you should create a module that
encapsulates the queries as functions:
```
defmodule TodoListPage do
def todo_list do
Query.css(".todo-list")
end
def todos(count) do
Query.css(".todo", count: count)
end
end
```
## What does my query do?
Wanna check out what exactly your query will do? Look no further than
`Wallaby.Query.compile/1` - it takes a query and returns the CSS or xpath
query that will be sent to the driver:
iex> Wallaby.Query.compile Wallaby.Query.text("my text")
{:xpath, ".//*[contains(normalize-space(text()), \"my text\")]"}
So, whenever you're not sure whatever a specific query will do just compile
it to get all the details!
"""
alias __MODULE__
alias Wallaby.Element
alias Wallaby.Query.XPath
defstruct method: nil,
selector: nil,
html_validation: nil,
conditions: [],
result: []
@type method ::
:css
| :xpath
| :link
| :button
| :fillable_field
| :checkbox
| :radio_button
| :option
| :select
| :file_field
| :attribute
@type attribute_key_value_pair :: {String.t(), String.t()}
@type selector ::
String.t()
| attribute_key_value_pair()
@type html_validation ::
:bad_label
| :button_type
| nil
@type conditions :: [
count: non_neg_integer | :any | nil,
minimum: non_neg_integer | nil,
maximum: non_neg_integer | nil,
text: String.t() | nil,
visible: boolean() | :any,
selected: boolean() | :any,
at: non_neg_integer | :all
]
@type result :: list(Element.t())
@type opts :: list()
@type t :: %__MODULE__{
method: method(),
selector: selector(),
html_validation: html_validation(),
conditions: conditions(),
result: result()
}
@type compiled :: {:xpath | :css, String.t()}
@doc """
Builds a query that locates elements matching the given CSS selector,
passed through to the driver verbatim.
"""
def css(selector, opts \\ []) do
  %Query{method: :css, selector: selector, conditions: build_conditions(opts)}
end
@doc """
Builds a query that locates elements matching the given XPath expression,
passed through to the driver verbatim.
"""
def xpath(selector, opts \\ []) do
  %Query{method: :xpath, selector: selector, conditions: build_conditions(opts)}
end
@doc """
This function can be used in one of two ways.
The first is by providing a selector and possible options. This generates a
query that checks if the provided text is contained anywhere.
## Example
```
Query.text("Submit", count: 1)
```
The second is by providing an existing query and a value to set as the `text`
option.
## Example
```
submit_button = Query.css("#submit-button")
update_button = submit_button |> Query.text("Update")
create_button = submit_button |> Query.text("Create")
```
"""
def text(query_or_selector, value_or_opts \\ [])
def text(%Query{} = query, value) do
update_condition(query, :text, value)
end
def text(selector, opts) do
%Query{
method: :text,
selector: selector,
conditions: build_conditions(opts)
}
end
@doc """
Checks if the provided value is contained anywhere.
"""
def value(selector, opts \\ []) do
attribute("value", selector, opts)
end
@doc """
Checks if the data attribute is contained anywhere.
"""
def data(name, selector, opts \\ []) do
attribute("data-#{name}", selector, opts)
end
@doc """
Checks if the provided attribute, value pair is contained anywhere.
"""
def attribute(name, value, opts \\ []) do
%Query{
method: :attribute,
selector: {name, value},
conditions: build_conditions(opts)
}
end
@doc """
See `Wallaby.Query.fillable_field/2`.
"""
def text_field(selector, opts \\ []) do
%Query{
method: :fillable_field,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a text input field where the provided selector is the id, name or
placeholder of the text field itself or alternatively the id or the text of
the label.
"""
def fillable_field(selector, opts \\ []) do
%Query{
method: :fillable_field,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a radio button where the provided selector is the id, name or
placeholder of the radio button itself or alternatively the id or the text of
the label.
"""
def radio_button(selector, opts \\ []) do
%Query{
method: :radio_button,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a checkbox where the provided selector is the id, name or
placeholder of the checkbox itself or alternatively the id or the text of
the label.
"""
def checkbox(selector, opts \\ []) do
%Query{
method: :checkbox,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a select box where the provided selector is the id or name of the
select box itself or alternatively the id or the text of the label.
"""
def select(selector, opts \\ []) do
%Query{
method: :select,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for an option that contains the given text.
"""
def option(selector, opts \\ []) do
%Query{
method: :option,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a button (literal button or input type button, submit, image or
reset) where the provided selector is the id, name, value, alt or title of the
button.
"""
def button(selector, opts \\ []) do
%Query{
method: :button,
selector: selector,
conditions: build_conditions(opts),
html_validation: :button_type
}
end
@doc """
Looks for a link where the selector is the id, link text, title of the link
itself or the alt of an image child node.
"""
def link(selector, opts \\ []) do
%Query{
method: :link,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Looks for a file input where the selector is the id or name of the file input
itself or the id or text of the label.
"""
def file_field(selector, opts \\ []) do
%Query{
method: :file_field,
selector: selector,
conditions: build_conditions(opts),
html_validation: :bad_label
}
end
@doc """
Updates a query's visibility (visible if `true`, hidden if `false`).
## Examples
```
Query.css("#modal")
|> Query.visible(true)
Query.css("#modal")
|> Query.visible(false)
```
"""
def visible(query, value) do
update_condition(query, :visible, value)
end
@doc """
Updates a query's `selected` option.
## Examples
```
Query.css("#select-dropdown")
|> Query.selected(true)
Query.css("#select-dropdown")
|> Query.selected(false)
```
"""
def selected(query, value) do
update_condition(query, :selected, value)
end
@doc """
Updates a query's `count`.
The `count` specifies how many elements you expect to be present within the scope
of the query and can be any number greater than zero or `:any`.
## Example
```elixir
# Exactly 2 elements
Query.css(".names > li")
|> Query.count(2)
# Any number of elements
Query.css(".names > li")
|> Query.count(:any)
```
"""
def count(query, value) do
update_condition(query, :count, value)
end
@doc """
Updates a query's `at` option.
## Example
```
Query.css(".names")
|> Query.at(3)
```
"""
def at(query, value) do
update_condition(query, :at, value)
end
# Validates a query's conditions before it is executed.
#
# Returns `{:ok, query}` when the conditions are coherent, otherwise:
#
#   * `{:error, :min_max}` when `:minimum` exceeds `:maximum`
#   * `{:error, :cannot_set_text_with_invisible_elements}` when `:text`
#     is combined with a non-`true` visibility (text cannot be read from
#     hidden elements)
def validate(query) do
  minimum = query.conditions[:minimum]
  maximum = query.conditions[:maximum]

  cond do
    # Compare the bounds only when both are actually set. The previous
    # bare `>` used Erlang term ordering, in which `nil` (an atom) sorts
    # above every integer, so a query that set only `maximum:` was
    # wrongly rejected with :min_max.
    is_integer(minimum) and is_integer(maximum) and minimum > maximum ->
      {:error, :min_max}

    Query.visible?(query) != true && Query.inner_text(query) ->
      {:error, :cannot_set_text_with_invisible_elements}

    true ->
      {:ok, query}
  end
end
@doc """
Compiles a query into CSS or xpath so its ready to be sent to the driver
iex> Wallaby.Query.compile Wallaby.Query.text("my text")
{:xpath, ".//*[contains(normalize-space(text()), \\"my text\\")]"}
iex> Wallaby.Query.compile Wallaby.Query.css("#some-id")
{:css, "#some-id"}
"""
@spec compile(t) :: compiled
def compile(%{method: :css, selector: selector}), do: {:css, selector}
def compile(%{method: :xpath, selector: selector}), do: {:xpath, selector}
def compile(%{method: :link, selector: selector}), do: {:xpath, XPath.link(selector)}
def compile(%{method: :button, selector: selector}), do: {:xpath, XPath.button(selector)}
def compile(%{method: :fillable_field, selector: selector}),
do: {:xpath, XPath.fillable_field(selector)}
def compile(%{method: :checkbox, selector: selector}), do: {:xpath, XPath.checkbox(selector)}
def compile(%{method: :radio_button, selector: selector}),
do: {:xpath, XPath.radio_button(selector)}
def compile(%{method: :option, selector: selector}), do: {:xpath, XPath.option(selector)}
def compile(%{method: :select, selector: selector}), do: {:xpath, XPath.select(selector)}
def compile(%{method: :file_field, selector: selector}),
do: {:xpath, XPath.file_field(selector)}
def compile(%{method: :text, selector: selector}), do: {:xpath, XPath.text(selector)}
def compile(%{method: :attribute, selector: {name, value}}),
do: {:xpath, XPath.attribute(name, value)}
def visible?(%Query{conditions: conditions}) do
Keyword.get(conditions, :visible)
end
def selected?(%Query{conditions: conditions}) do
Keyword.get(conditions, :selected)
end
def count(%Query{conditions: conditions}) do
Keyword.get(conditions, :count)
end
def at_number(%Query{conditions: conditions}) do
Keyword.get(conditions, :at)
end
def inner_text(%Query{conditions: conditions}) do
Keyword.get(conditions, :text)
end
def result(query) do
if specific_element_requested(query) do
[element] = query.result
element
else
query.result
end
end
def specific_element_requested(query) do
count(query) == 1 || at_number(query) != :all
end
# Decides whether `actual_count` found elements satisfy the query's
# count conditions: an exact `:count`, the wildcard `:any` (at least one),
# or an optional `:minimum`/`:maximum` window.
def matches_count?(%{conditions: conditions}, actual_count) do
  min = conditions[:minimum]
  max = conditions[:maximum]

  case conditions[:count] do
    :any ->
      actual_count > 0

    nil ->
      (is_nil(min) or min <= actual_count) and (is_nil(max) or max >= actual_count)

    expected ->
      expected == actual_count
  end
end
# Normalizes user-supplied options into the complete conditions keyword
# list, filling in every default the rest of the module relies on. The
# insertion order mirrors the original helper chain so the resulting
# keyword list is identical.
defp build_conditions(opts) do
  opts
  |> Keyword.put_new(:visible, true)
  |> Keyword.put_new(:text, nil)
  |> put_count_defaults()
  |> Keyword.put_new(:selected, :any)
  |> Keyword.put_new(:at, :all)
end

# When no count-related option was given at all, default to exactly one
# element; otherwise ensure :count, :minimum and :maximum are all present
# (nil when unspecified).
defp put_count_defaults(opts) do
  if opts[:count] == nil and opts[:minimum] == nil and opts[:maximum] == nil do
    Keyword.put(opts, :count, 1)
  else
    opts
    |> Keyword.put_new(:count, nil)
    |> Keyword.put_new(:minimum, nil)
    |> Keyword.put_new(:maximum, nil)
  end
end
# Replaces (or inserts) a single condition on the query, returning the
# updated struct.
defp update_condition(%Query{} = query, key, value) do
  %Query{query | conditions: Keyword.put(query.conditions, key, value)}
end
end
| 25.527574 | 130 | 0.631166 |
1c997f67a7fcac87853a4c3dd9636b2f543ffd87 | 151 | ex | Elixir | lib/apoc/hazmat/hash/sha.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 6 | 2018-10-04T14:18:35.000Z | 2020-05-15T08:43:31.000Z | lib/apoc/hazmat/hash/sha.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 3 | 2018-10-23T12:20:45.000Z | 2021-01-27T10:41:14.000Z | lib/apoc/hazmat/hash/sha.ex | auxesis/apoc | e650c21767f508a2720dad1bb3d14439bdcf39c4 | [
"Apache-2.0"
] | 2 | 2020-02-19T00:43:37.000Z | 2021-08-19T04:04:25.000Z | defmodule Apoc.Hazmat.Hash.SHA do
use Apoc.Adapter.Hash
@impl Apoc.Adapter.Hash
# Raw SHA-1 digest (20 bytes) of `message`, computed by OTP's :crypto
# module. SHA-1 lives under the Hazmat namespace deliberately; prefer a
# SHA-2/SHA-3 hash for new code.
def hash!(message), do: :crypto.hash(:sha, message)
end
| 16.777778 | 33 | 0.708609 |
1c998a4dc3c5e751074f5637ca2ec9d1af9228f4 | 1,565 | ex | Elixir | clients/service_user/lib/google_api/service_user/v1/model/custom_http_pattern.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_user/lib/google_api/service_user/v1/model/custom_http_pattern.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_user/lib/google_api/service_user/v1/model/custom_http_pattern.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUser.V1.Model.CustomHttpPattern do
@moduledoc """
A custom pattern is used for defining custom HTTP verb.
## Attributes
* `kind` (*type:* `String.t`, *default:* `nil`) - The name of this custom HTTP verb.
* `path` (*type:* `String.t`, *default:* `nil`) - The path matched by this custom verb.
"""
# ModelBase presumably injects the struct plus the decode/2 used by the
# Poison.Decoder impl below and the field/1 macro — see
# GoogleApi.Gax.ModelBase for the generated code.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:kind => String.t(),
:path => String.t()
}
# Generated field registrations; both fields default to nil.
field(:kind)
field(:path)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUser.V1.Model.CustomHttpPattern do
  # Delegates decoding straight to the generated model implementation.
  def decode(value, options),
    do: GoogleApi.ServiceUser.V1.Model.CustomHttpPattern.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUser.V1.Model.CustomHttpPattern do
  # Encoding is shared across generated models via ModelBase.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 31.3 | 91 | 0.719489 |
1c99956cbe983c20d73876f805916563d6789fd9 | 2,625 | ex | Elixir | lumberjack/lib/lumberjack.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | lumberjack/lib/lumberjack.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | lumberjack/lib/lumberjack.ex | lucas-larsson/ID1019 | b21a79bfa7fbbaaba0b4db88ec8b44fc5e9f291c | [
"MIT"
] | null | null | null | defmodule Lumberjack do
@moduledoc """
Documentation for `Lumberjack`.
"""
@doc """
Hello world.
## Examples
iex> Lumberjack.hello()
:world
"""
# Returns the :world atom; kept to satisfy the doctest in the @doc above.
def hello, do: :world
# Enumerates every way to assign the elements of `seq` to a left/right
# pair, returning {left, right, total} tuples where `total` is the sum of
# all elements. Each element independently goes left or right, so 2^n
# tuples come back. Elements are accumulated by prepending, so each side
# comes out in reverse order relative to `seq`.
def split(seq), do: split(seq, 0, [], [])

def split([], total, left, right), do: [{left, right, total}]

def split([head | tail], total, left, right) do
  with_left = split(tail, total + head, [head | left], right)
  with_right = split(tail, total + head, left, [head | right])
  with_left ++ with_right
end
# Exhaustive search over all left/right assignments of the values in
# `seq`, returning {total_cost, tree} where `tree` is a nested tuple
# describing the chosen pairing. NOTE(review): cost([]) returns a bare 0
# instead of a {cost, tree} tuple, so callers must special-case the empty
# list.
def cost([]) do 0 end
def cost([h]) do {0, h} end
def cost(seq) do cost(seq, 0, [], []) end
# cost/4: `l` is the accumulated sum of everything already assigned;
# `left`/`right` hold the elements routed to each side so far.
# All elements assigned: price both sides recursively and add `l` for
# this level. Both sides are non-empty here because the single-element
# clauses below catch the empty-side cases first.
def cost([], l, left, right) do
{cl, tl} = cost(left)
{cr, tr} = cost(right)
{cl+cr+l, {tl, tr}}
end
# Last element with an empty left side: it must pair against the right
# subtree.
def cost([s], l, [], right) do
{cr, tr} = cost(right)
{cr+l+s, {s, tr}}
end
# Symmetric case: last element pairs against the left subtree.
def cost([s], l, left, []) do
{cl, tl} = cost(left)
{cl+l+s, {s, tl}}
end
# General step: try the next element on each side and keep the cheaper
# alternative (ties resolve to the right-hand branch).
def cost([s|rest], l, left, right) do
{costl, tl} = cost(rest, l+s, [s|left], right)
{costr, tr} = cost(rest, l+s, left, [s|right])
if costl < costr do
{costl, tl}
else
{costr, tr}
end
end
# Memoized variant of cost/1: sorts the input first so permutations of the
# same multiset share cache entries, and threads a Memo cache through the
# recursion. Returns {cost, tree}; the empty list yields {0, :nope}.
def cost2_0([]) do {0, :nope} end
def cost2_0(seq) do
{cost, tree, _} = cost2_0(Enum.sort(seq), Memo.new())
{cost, tree}
end
# cost2_0/2: prices one (sub)sequence and records the answer in the cache,
# keyed by the sequence itself (see Memo.add below).
def cost2_0([s], mem) do {0, s, mem} end
def cost2_0([s|rest]=seq, mem) do
{c, t, mem} = cost2_0(rest, s, [s], [], mem)
{c, t, Memo.add(mem, seq, {c, t})}
end
# cost2_0/5: same search shape as cost/4, but sub-results go through
# check/2 so previously priced subsequences are reused. The accumulators
# are reversed before lookup to restore the order used as the cache key.
def cost2_0([], l, left, right, mem) do
{cl, tl, mem} = check(Enum.reverse(left), mem)
{cr, tr, mem} = check(Enum.reverse(right), mem)
{cr+cl+l, {tl, tr}, mem}
end
# Last element with an empty right side: pair it against the left subtree.
def cost2_0([s], l, left, [], mem) do
{c, t, mem} = check(Enum.reverse(left), mem)
{c+s+l, {t, s}, mem}
end
# Symmetric case for an empty left side.
def cost2_0([s], l, [], right, mem) do
{c, t, mem} = check(Enum.reverse(right), mem)
{c+s+l, {t, s}, mem}
end
# General step: route the next element left or right, keep the cheaper
# result, and thread the (possibly grown) cache along.
def cost2_0([s|rest], l, left, right, mem) do
{cl, tl, mem} = cost2_0(rest, l+s, [s|left], right, mem)
{cr, tr, mem} = cost2_0(rest, l+s, left, [s|right], mem)
if cl < cr do
{cl, tl, mem}
else
{cr, tr, mem}
end
end
# Cache-aware pricing: reuse a memoized {cost, tree} when one exists,
# otherwise compute (and cache) it via cost2_0/2.
def check(seq, mem) do
  case Memo.lookup(mem, seq) do
    {c, t} -> {c, t, mem}
    nil -> cost2_0(seq, mem)
  end
end
# Benchmarks cost2_0/1 on the inputs [1], [1, 2], ..., Enum.to_list(1..n),
# printing the wall-clock time in microseconds (via :timer.tc) for each
# input size. Runs purely for its console output.
#
# Fix: the original used a `for` comprehension for side effects, building
# and returning a throwaway list of :ok values; Enum.each makes the
# side-effect intent explicit and avoids the allocation.
def bench(n) do
  Enum.each(1..n, fn size ->
    {elapsed_us, _result} = :timer.tc(fn -> cost2_0(Enum.to_list(1..size)) end)
    IO.puts(" n = #{size}\t t = #{elapsed_us} us")
  end)
end
end
| 24.305556 | 78 | 0.459429 |
1c9999925985cf2c4fc667c88695e7aed37ae4fb | 817 | exs | Elixir | test/web/controllers/profile_controller_test.exs | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 7 | 2019-12-13T19:23:47.000Z | 2022-01-22T23:02:42.000Z | test/web/controllers/profile_controller_test.exs | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 11 | 2021-03-10T01:57:00.000Z | 2021-08-31T18:30:54.000Z | test/web/controllers/profile_controller_test.exs | rozerosie/heycake | d080531705c0402fa53696d02307d6c08d25a60a | [
"MIT"
] | 2 | 2020-06-03T22:08:06.000Z | 2022-03-11T22:13:36.000Z | defmodule Web.ProfileControllerTest do
# Brings in the Phoenix connection test helpers (`conn`, route helpers,
# response assertions, etc.).
use Web.ConnCase
# NOTE(review): the describe label says "registering" but both tests
# exercise the profile *update* action — consider renaming.
describe "registering a new user" do
# Valid attributes: the update should succeed and redirect to the
# profile show page.
test "successful", %{conn: conn} do
{:ok, user} = TestHelpers.create_user()
params = %{
email: "user@example.com",
first_name: "John",
last_name: "Smith"
}
conn =
conn
|> assign(:current_user, user)
|> put(Routes.profile_path(conn, :update), user: params)
assert redirected_to(conn) == Routes.profile_path(conn, :show)
end
# Invalid attributes (nil first name): the update is rejected with a
# 422 unprocessable-entity HTML response.
test "failure", %{conn: conn} do
{:ok, user} = TestHelpers.create_user()
params = %{first_name: nil}
conn =
conn
|> assign(:current_user, user)
|> put(Routes.profile_path(conn, :update), user: params)
assert html_response(conn, 422)
end
end
end
| 22.694444 | 68 | 0.587515 |
1c99b657f6951093b26ec912e661eb2ccbc6c134 | 884 | ex | Elixir | clients/container/lib/google_api/container/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/metadata.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1 do
@moduledoc """
API client metadata for GoogleApi.Container.V1.
"""
# Revision of the Google discovery document this client was generated
# from (YYYYMMDD).
@discovery_revision "20220215"
# Exposes the compile-time revision so callers can report or compare it.
def discovery_revision(), do: @discovery_revision
end
| 32.740741 | 74 | 0.75905 |
1c99bba5d5324c8a648df92e1c9ed0faf327946f | 1,577 | ex | Elixir | lib/koala_web.ex | joonatank/koala | 71d613c2a1d26fb28078687b72522f0122a9e6ff | [
"MIT"
] | null | null | null | lib/koala_web.ex | joonatank/koala | 71d613c2a1d26fb28078687b72522f0122a9e6ff | [
"MIT"
] | null | null | null | lib/koala_web.ex | joonatank/koala | 71d613c2a1d26fb28078687b72522f0122a9e6ff | [
"MIT"
] | null | null | null | defmodule KoalaWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use KoalaWeb, :controller
use KoalaWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted imports/uses injected into controllers via `use KoalaWeb, :controller`.
def controller do
quote do
use Phoenix.Controller, namespace: KoalaWeb
import Plug.Conn
import KoalaWeb.Router.Helpers
import KoalaWeb.Gettext
end
end
# Quoted imports/uses injected into views via `use KoalaWeb, :view`.
def view do
quote do
use Phoenix.View, root: "lib/koala_web/templates",
namespace: KoalaWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import KoalaWeb.Router.Helpers
import KoalaWeb.ErrorHelpers
import KoalaWeb.Gettext
end
end
# Quoted setup injected into the router module.
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
# Quoted setup injected into channel modules.
def channel do
quote do
use Phoenix.Channel
import KoalaWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
# `use KoalaWeb, :controller` expands to the quoted block returned by
# controller/0 above; `which` must name one of the public functions in
# this module (:controller, :view, :router, :channel).
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.191176 | 69 | 0.681674 |
1c99ec142d3ef3f8c30801cc92098cc20e4b68fd | 210 | exs | Elixir | dharma_server/config/runtime.exs | Dharma-Network/dharma-server | 68f3ab108b38e9ef3149c9e8f0e6cd3147d931c1 | [
"MIT"
] | null | null | null | dharma_server/config/runtime.exs | Dharma-Network/dharma-server | 68f3ab108b38e9ef3149c9e8f0e6cd3147d931c1 | [
"MIT"
] | 8 | 2021-07-22T15:59:14.000Z | 2021-08-13T10:16:49.000Z | dharma_server/config/runtime.exs | Dharma-Network/dharma-server | 68f3ab108b38e9ef3149c9e8f0e6cd3147d931c1 | [
"MIT"
] | null | null | null | import Config
# Event sources polled by the processor stage.
config :processor,
:source,
["github", "trello"]
# Event sources for the extractor stage (same list as the processor above).
config :extractor,
:source,
["github", "trello"]
# AMQP connection string for RabbitMQ.
# NOTE(review): default guest/guest credentials on localhost are fine for
# dev, but this is runtime.exs — consider reading the URL from
# System.get_env/1 for production deployments.
config :rabbit,
:url,
"amqp://guest:guest@localhost"
| 15 | 37 | 0.552381 |
1c99f3eac86f7db5ed1a871bc745b8a36f9c9a57 | 1,119 | exs | Elixir | config/config.exs | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
] | 30 | 2017-04-17T09:52:59.000Z | 2022-03-28T09:55:05.000Z | config/config.exs | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
] | 1 | 2020-08-25T17:23:51.000Z | 2020-08-27T23:53:19.000Z | config/config.exs | sezaru/expyplot | 17dedaed998042d96293f5912d007d5ad3d0a4df | [
"MIT"
] | 4 | 2019-02-01T07:12:37.000Z | 2020-08-24T13:24:07.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favour
# of `import Config` — verify the project's minimum supported Elixir
# version before switching.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :expyplot, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:expyplot, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.096774 | 73 | 0.751564 |
1c99ff8aca3a06ef9af56d8d860be67adad372db | 1,435 | exs | Elixir | mix.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/microformats2-elixir | 5a48e7bc8bb82ca4b43986f8497a066b23baf6a4 | [
"MIT"
] | null | null | null | defmodule Microformats2.Mixfile do
use Mix.Project
# Project definition consumed by Mix; see `mix help Mix.Project`.
def project do
[
app: :microformats2,
version: "0.7.4",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
description: description(),
package: package(),
deps: deps()
]
end
# Compile test support helpers only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[extra_applications: [:logger]]
end
# One-line package description shown on hex.pm.
def description do
"""
A microformats2 parser (http://microformats.org/wiki/microformats-2) for Elixir
"""
end
# Hex package metadata (published files, maintainers, license, links).
def package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Christian Kruse"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/ckruse/microformats2-elixir"}
]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:floki, "~> 0.7"},
{:tesla, "~> 1.4.0", optional: true},
{:ex_doc, ">= 0.0.0", only: :dev},
{:jason, "~> 1.2", only: [:dev, :test]}
]
end
end
| 23.52459 | 83 | 0.576307 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.