hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff1444094b282faa7dc22625bf75700d26a0ece5 | 589 | ex | Elixir | lib/messenger/request.ex | pilu/messenger | 664fcbe42368f9b8b41710a1b7f0d131c94c99a2 | [
"MIT"
] | 7 | 2016-04-29T17:46:35.000Z | 2018-07-04T15:48:08.000Z | lib/messenger/request.ex | gravityblast/messenger | 664fcbe42368f9b8b41710a1b7f0d131c94c99a2 | [
"MIT"
] | null | null | null | lib/messenger/request.ex | gravityblast/messenger | 664fcbe42368f9b8b41710a1b7f0d131c94c99a2 | [
"MIT"
] | null | null | null | defmodule Messenger.Request do
  alias Messenger.TextMessage
  alias Messenger.Recipient
  alias Messenger.Request
  alias Messenger.API

  # A request pairs a recipient with a message payload; both default to empty
  # structs so a bare %Request{} can be filled in via the put_* helpers below.
  defstruct [recipient: %Recipient{}, message: %TextMessage{}]
def new do
%Request{}
end
def put_recipient request = %Request{}, recipient = %Recipient{} do
Map.put request, :recipient, recipient
end
def put_message request = %Request{}, message = %TextMessage{} do
Map.put request, :message, message
end
def send request = %Request{}, page_access_token do
API.send_post_request(page_access_token, request)
end
end
| 23.56 | 69 | 0.726655 |
ff148348e7e50a56ec684945832a348a286d4955 | 6,189 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Api.Grouplicenses do
@moduledoc """
API calls for all endpoints tagged `Grouplicenses`.
"""
alias GoogleApi.AndroidEnterprise.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves details of an enterprise's group license for a product.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidEnterprise.V1.Connection.t`) - Connection to server
* `enterprise_id` (*type:* `String.t`) - The ID of the enterprise.
* `group_license_id` (*type:* `String.t`) - The ID of the product the group license is for, e.g. "app:com.google.android.gm".
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidEnterprise.V1.Model.GroupLicense{}}` on success
* `{:error, info}` on failure
"""
@spec androidenterprise_grouplicenses_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.AndroidEnterprise.V1.Model.GroupLicense.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
  def androidenterprise_grouplicenses_get(
        connection,
        enterprise_id,
        group_license_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Whitelist of supported query parameters; anything else in
    # `optional_params` is ignored by Request.add_optional_params/3.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # Build a GET request; path placeholders are filled with URL-encoded
    # segments (only unreserved characters pass through unescaped).
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/enterprises/{enterpriseId}/groupLicenses/{groupLicenseId}", %{
        "enterpriseId" => URI.encode(enterprise_id, &URI.char_unreserved?/1),
        "groupLicenseId" => URI.encode(group_license_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute and decode the JSON body into a GroupLicense struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AndroidEnterprise.V1.Model.GroupLicense{}])
  end
@doc """
Retrieves IDs of all products for which the enterprise has a group license.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidEnterprise.V1.Connection.t`) - Connection to server
* `enterprise_id` (*type:* `String.t`) - The ID of the enterprise.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec androidenterprise_grouplicenses_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
  def androidenterprise_grouplicenses_list(
        connection,
        enterprise_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Whitelist of supported query parameters for this endpoint.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    # GET /enterprises/{enterpriseId}/groupLicenses with the enterprise id
    # URL-encoded into the path.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/enterprises/{enterpriseId}/groupLicenses", %{
        "enterpriseId" => URI.encode(enterprise_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute and decode into the list-response wrapper struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse{}]
    )
  end
end
| 40.986755 | 187 | 0.647277 |
ff14885691797cb2c4d54ff1627ec3eb43ceec21 | 3,954 | ex | Elixir | clients/run/lib/google_api/run/v1/model/probe.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/probe.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/probe.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Run.V1.Model.Probe do
@moduledoc """
Not supported by Cloud Run Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive traffic.
## Attributes
* `exec` (*type:* `GoogleApi.Run.V1.Model.ExecAction.t`, *default:* `nil`) - (Optional) One and only one of the following should be specified. Exec specifies the action to take. A field inlined from the Handler message.
* `failureThreshold` (*type:* `integer()`, *default:* `nil`) - (Optional) Minimum consecutive failures for the probe to be considered failed after having succeeded. Defaults to 3. Minimum value is 1.
* `httpGet` (*type:* `GoogleApi.Run.V1.Model.HTTPGetAction.t`, *default:* `nil`) - (Optional) HTTPGet specifies the http request to perform. A field inlined from the Handler message.
* `initialDelaySeconds` (*type:* `integer()`, *default:* `nil`) - (Optional) Number of seconds after the container has started before liveness probes are initiated. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
* `periodSeconds` (*type:* `integer()`, *default:* `nil`) - (Optional) How often (in seconds) to perform the probe. Default to 10 seconds. Minimum value is 1. Maximum value is 3600. Must be greater or equal than timeout_seconds.
* `successThreshold` (*type:* `integer()`, *default:* `nil`) - (Optional) Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1. Must be 1 for liveness and startup Probes.
* `tcpSocket` (*type:* `GoogleApi.Run.V1.Model.TCPSocketAction.t`, *default:* `nil`) - (Optional) TCPSocket specifies an action involving a TCP port. TCP hooks not yet supported A field inlined from the Handler message.
* `timeoutSeconds` (*type:* `integer()`, *default:* `nil`) - (Optional) Number of seconds after which the probe times out. Defaults to 1 second. Minimum value is 1. Maximum value is 3600. Must be smaller than period_seconds. More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
"""
  use GoogleApi.Gax.ModelBase

  # Every attribute is optional (nil when absent in the API payload).
  @type t :: %__MODULE__{
          :exec => GoogleApi.Run.V1.Model.ExecAction.t() | nil,
          :failureThreshold => integer() | nil,
          :httpGet => GoogleApi.Run.V1.Model.HTTPGetAction.t() | nil,
          :initialDelaySeconds => integer() | nil,
          :periodSeconds => integer() | nil,
          :successThreshold => integer() | nil,
          :tcpSocket => GoogleApi.Run.V1.Model.TCPSocketAction.t() | nil,
          :timeoutSeconds => integer() | nil
        }

  # field/2 with `as:` decodes nested JSON objects into their model structs;
  # bare field/1 keeps the scalar value as decoded.
  field(:exec, as: GoogleApi.Run.V1.Model.ExecAction)
  field(:failureThreshold)
  field(:httpGet, as: GoogleApi.Run.V1.Model.HTTPGetAction)
  field(:initialDelaySeconds)
  field(:periodSeconds)
  field(:successThreshold)
  field(:tcpSocket, as: GoogleApi.Run.V1.Model.TCPSocketAction)
  field(:timeoutSeconds)
end
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.Probe do
  # Delegate to the model's own decode/2 (provided by Gax.ModelBase).
  def decode(value, options), do: GoogleApi.Run.V1.Model.Probe.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.Probe do
  # Encode via the shared ModelBase implementation.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 58.147059 | 321 | 0.724077 |
ff14c1ab554a2f2571b800b08991800aa5026376 | 1,681 | ex | Elixir | apps/fz_http/lib/fz_http_web/authentication.ex | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/authentication.ex | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/lib/fz_http_web/authentication.ex | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | defmodule FzHttpWeb.Authentication do
@moduledoc """
Authentication helpers.
"""
use Guardian, otp_app: :fz_http
alias FzHttp.Telemetry
alias FzHttp.Users
alias FzHttp.Users.User
@guardian_token_name "guardian_default_token"
def subject_for_token(resource, _claims) do
{:ok, to_string(resource.id)}
end
def resource_from_claims(%{"sub" => id}) do
case Users.get_user(id) do
nil -> {:error, :resource_not_found}
user -> {:ok, user}
end
end
@doc """
Authenticates a user against a password hash. Only makes sense
for local auth.
"""
def authenticate(%User{} = user, password) when is_binary(password) do
if user.password_hash do
authenticate(
user,
password,
Argon2.verify_pass(password, user.password_hash)
)
else
{:error, :invalid_credentials}
end
end
def authenticate(_user, _password) do
authenticate(nil, nil, Argon2.no_user_verify())
end
defp authenticate(user, _password, true) do
{:ok, user}
end
defp authenticate(_user, _password, false) do
{:error, :invalid_credentials}
end
  @doc """
  Signs `user` into the Guardian session on `conn`, recording a telemetry
  login event and updating the user's last-signed-in info first.
  """
  def sign_in(conn, user, auth) do
    Telemetry.login()
    Users.update_last_signed_in(user, auth)
    __MODULE__.Plug.sign_in(conn, user)
  end
def sign_out(conn) do
__MODULE__.Plug.sign_out(conn)
end
def get_current_user(%Plug.Conn{} = conn) do
__MODULE__.Plug.current_resource(conn)
end
def get_current_user(%{@guardian_token_name => token} = _session) do
case Guardian.resource_from_token(__MODULE__, token) do
{:ok, resource, _claims} ->
resource
{:error, _reason} ->
nil
end
end
end
| 22.118421 | 72 | 0.673409 |
ff14e158db0b84de77b0e410f6f6a9ee11b0734c | 308 | ex | Elixir | lib/codes/codes_b59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_b59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_b59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_B59 do
alias IcdCode.ICDCode
def _B59 do
%ICDCode{full_code: "B59",
category_code: "B59",
short_code: "",
full_name: "Pneumocystosis",
short_name: "Pneumocystosis",
category_name: "Pneumocystosis"
}
end
end
| 19.25 | 41 | 0.607143 |
ff14fd7c898029447fd0a541462fae8a58b29ed4 | 705 | exs | Elixir | config/config.exs | almightycouch/gitgud | 21cf81ff29e0ec1601bb96e1621237099d0e677a | [
"MIT"
] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | config/config.exs | almightycouch/gitgud | 21cf81ff29e0ec1601bb96e1621237099d0e677a | [
"MIT"
] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | config/config.exs | almightycouch/gitgud | 21cf81ff29e0ec1601bb96e1621237099d0e677a | [
"MIT"
] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z | import Config
# By default, the umbrella project as well as each child
# application will require this configuration file, ensuring
# they all use the same configuration. While one could
# configure all applications here, we prefer to delegate
# back to each application for organization purposes.
for config <- Path.wildcard(Path.expand("../apps/*/config/config.exs", __DIR__)) do
import_config config
end
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 35.25 | 83 | 0.767376 |
ff1502ace4fe44d288a04d9bbb74042f41d32e6e | 1,419 | ex | Elixir | lib/mix/tasks/sugar/init.ex | pct/sugar | 772aa993dec98f26355991168186f30745dbc756 | [
"MIT"
] | 374 | 2015-01-03T00:16:25.000Z | 2022-02-02T21:08:48.000Z | lib/mix/tasks/sugar/init.ex | pct/sugar | 772aa993dec98f26355991168186f30745dbc756 | [
"MIT"
] | 44 | 2015-01-01T11:38:12.000Z | 2020-11-18T19:48:01.000Z | lib/mix/tasks/sugar/init.ex | pct/sugar | 772aa993dec98f26355991168186f30745dbc756 | [
"MIT"
] | 34 | 2015-01-04T13:42:41.000Z | 2020-07-18T17:27:27.000Z | defmodule Mix.Tasks.Sugar.Init do
use Mix.Task
import Mix.Generator
import Macro, only: [camelize: 1, underscore: 1]
@shortdoc "Creates Sugar support files"
@recursive true
@moduledoc """
Creates Sugar support files for new projects after adding Sugar as a
project dependency.
## Command line options
* `--path` - override the project path. Defaults to `lib/[app name]`
* `--priv-path` - override the priv path. Defaults to `priv`
* `--no-repo` - skip creation of Ecto Repo
"""
def run(args) do
opts = OptionParser.parse(args, switches: [no_repo: :boolean, path: :string, priv_path: :string])
do_init elem(opts, 0)
end
  # Generates the project scaffolding: priv/ dirs, router, a main controller
  # and a main/index view. CLI opts override the derived defaults.
  defp do_init(opts) do
    # Derive module namespace and source path from the OTP app name,
    # e.g. :my_app -> "MyApp" / "lib/my_app".
    name = camelize String.Chars.to_string(Mix.Project.config[:app])
    path = "lib/#{underscore name}"
    assigns = [
      app: Mix.Project.config[:app],
      module: name,
      path: path,
      priv_path: "priv"
    ] |> Keyword.merge(opts)

    # Privileged (static assets live under priv/)
    create_directory "#{assigns[:priv_path]}"
    create_directory "#{assigns[:priv_path]}/static"

    # Support files
    Mix.Tasks.Sugar.Gen.Router.run_detached assigns

    # Controllers
    create_directory "#{assigns[:path]}/controllers"
    Mix.Tasks.Sugar.Gen.Controller.run_detached(assigns ++ [name: "main"])

    # Views
    create_directory "#{assigns[:path]}/views"
    Mix.Tasks.Sugar.Gen.View.run_detached(assigns ++ [name: "main/index"])
  end
end
| 27.288462 | 101 | 0.669486 |
ff1514a332bec9c0eec28841552f144c65267f3d | 337 | exs | Elixir | tutorials/genserver/async_otp/test/async_otp_test.exs | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | tutorials/genserver/async_otp/test/async_otp_test.exs | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | tutorials/genserver/async_otp/test/async_otp_test.exs | idfumg/ElixirSynopsis | 74c668d84300812dd41eb18772aecfb89bc7628b | [
"MIT"
] | null | null | null | defmodule AsyncOtpTest do
use ExUnit.Case
doctest AsyncOtp
test "async initialization success" do
assert {:ok, pid} = AsyncOtp.start_link()
end
test "send long running task but do not stop server loop working so long" do
assert {:ok, pid} = AsyncOtp.start_link()
assert :result == AsyncOtp.request(pid)
end
end
| 24.071429 | 78 | 0.712166 |
ff1514a4eb9f3c6a87f85346fe7be875fd360323 | 946 | ex | Elixir | apps/bifrost/test/support/channel_case.ex | thebugcatcher/heimdall | 5da4d2afcf80a8dabd23bbd22b19e24bcbd507a9 | [
"MIT"
] | 2 | 2021-04-03T14:10:20.000Z | 2021-07-07T21:17:35.000Z | apps/bifrost/test/support/channel_case.ex | aditya7iyengar/heimdall | 5da4d2afcf80a8dabd23bbd22b19e24bcbd507a9 | [
"MIT"
] | null | null | null | apps/bifrost/test/support/channel_case.ex | aditya7iyengar/heimdall | 5da4d2afcf80a8dabd23bbd22b19e24bcbd507a9 | [
"MIT"
] | 1 | 2020-11-12T08:38:35.000Z | 2020-11-12T08:38:35.000Z | defmodule BifrostWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use BifrostWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use BifrostWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import BifrostWeb.ChannelCase

      # The default endpoint for testing
      @endpoint BifrostWeb.Endpoint
    end
  end
  # NOTE(review): the moduledoc mentions enabling the SQL sandbox for
  # database-touching tests, but no sandbox checkout happens here — confirm
  # whether Ecto sandbox setup is intentionally omitted.
  setup _tags do
    :ok
  end
end
| 27.028571 | 64 | 0.738901 |
ff154de426251bbc2fd8d5480c84fa31a9b7fa8f | 876 | ex | Elixir | lib/liquid_voting_auth_web/controllers/auth_controller.ex | jinjagit/auth | e5b8ec515229e7087d6c63a72d50d06120c0ad94 | [
"MIT"
] | null | null | null | lib/liquid_voting_auth_web/controllers/auth_controller.ex | jinjagit/auth | e5b8ec515229e7087d6c63a72d50d06120c0ad94 | [
"MIT"
] | 21 | 2020-09-28T06:36:30.000Z | 2022-02-07T10:04:36.000Z | lib/liquid_voting_auth_web/controllers/auth_controller.ex | jinjagit/auth | e5b8ec515229e7087d6c63a72d50d06120c0ad94 | [
"MIT"
] | null | null | null | defmodule LiquidVotingAuthWeb.AuthController do
use LiquidVotingAuthWeb, :controller
alias LiquidVotingAuth.Organizations
  # Validates the bearer token in the Authorization header. On success the
  # organization's id is echoed back in the "org-uuid" response header with
  # a 200; any missing/malformed header or unknown key yields a 401.
  def index(conn, _params) do
    with(
      [header_value] <- get_req_header(conn, "authorization"),
      {:ok, auth_key} <- get_bearer(header_value)
    ) do
      if org = registered?(auth_key) do
        conn
        |> put_resp_header("org-uuid", org.id)
        |> send_resp(200, "")
      else
        conn |> send_resp(401, "")
      end
    else
      # No (or multiple) authorization headers, or a non-bearer scheme.
      _ -> conn |> send_resp(401, "")
    end
  end
  # Looks up the organization for the auth key; returns the org struct or nil
  # (note: despite the `?` suffix this is not a strict boolean).
  defp registered?(auth_key), do: Organizations.get_organization_with_auth_key(auth_key)
defp get_bearer(header_value) do
with(
[method, auth_key] <- String.split(header_value, " ", parts: 2),
"bearer" <- String.downcase(method)
) do
{:ok, auth_key}
else
_ -> {:error, :no_bearer}
end
end
end
| 24.333333 | 88 | 0.623288 |
ff1553281707818c2e234cad08aff00b13a41fe2 | 1,173 | exs | Elixir | test/peerage_test.exs | gvl/peerage | d4a790445841aed5d0f879b13533105e8893de8a | [
"MIT"
] | null | null | null | test/peerage_test.exs | gvl/peerage | d4a790445841aed5d0f879b13533105e8893de8a | [
"MIT"
] | null | null | null | test/peerage_test.exs | gvl/peerage | d4a790445841aed5d0f879b13533105e8893de8a | [
"MIT"
] | null | null | null | defmodule PeerageTest do
use ExUnit.Case
doctest Peerage
@app :a1234
defmodule M do
def f, do: "your key is 1234. write it down."
end
def setup do
delete_all_env(@app)
:ok
end
test "Peerage.Via.Dns.poll returns a list of names" do
Application.put_env(:peerage, :dns_name, "localhost")
Application.put_env(:peerage, :app_name, "peerage")
# note: if you're not connected to a network this won't work.
# it's dns resolution...
[_addr | _rest] = Peerage.Via.Dns.poll()
end
test "Deferred config sanity check" do
# see DeferredConfig for more extensive usage
@app |> Application.put_env(:arbitrary, {:apply, {M, :f, []}})
kv =
@app
|> Application.get_all_env()
|> DeferredConfig.transform_cfg()
assert "your key" <> _ = kv[:arbitrary]
assert {:apply, _} = Application.get_env(@app, :arbitrary)
kv |> DeferredConfig.apply_transformed_cfg!(@app)
assert "your key" <> _ = Application.get_env(@app, :arbitrary)
end
defp delete_all_env(app) do
app
|> Application.get_all_env()
|> Enum.each(fn {k, _} ->
Application.delete_env(app, k)
end)
end
end
| 24.4375 | 66 | 0.645354 |
ff156a9f2d2951ec8e8b3ec544ee9191dd7d07c8 | 1,247 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/url_map_list.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/url_map_list.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/url_map_list.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.UrlMapList do
  @moduledoc """
  Contains a list of UrlMap resources.
  """

  @derive [Poison.Encoder]
  # Standard Compute API list-response fields; all default to nil.
  defstruct [
    :"id",
    :"items",
    :"kind",
    :"nextPageToken",
    :"selfLink"
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.UrlMapList do
  import GoogleApi.Compute.V1.Deserializer

  # Hydrate each entry of "items" into a UrlMap struct; all other fields
  # are kept as decoded by Poison.
  def decode(value, options) do
    deserialize(value, :"items", :list, GoogleApi.Compute.V1.Model.UrlMap, options)
  end
end
| 29 | 79 | 0.729751 |
ff15bd12694a9b8d6fbf2acee4871c1c4c046e37 | 2,714 | exs | Elixir | test/phoenix/live_dashboard/telemetry_listener_test.exs | PaulOstazeski/phoenix_live_dashboard | c68eac2640d6579d16f222fe58c1eb669342a1f2 | [
"MIT"
] | 1 | 2020-11-04T16:18:16.000Z | 2020-11-04T16:18:16.000Z | test/phoenix/live_dashboard/telemetry_listener_test.exs | PaulOstazeski/phoenix_live_dashboard | c68eac2640d6579d16f222fe58c1eb669342a1f2 | [
"MIT"
] | null | null | null | test/phoenix/live_dashboard/telemetry_listener_test.exs | PaulOstazeski/phoenix_live_dashboard | c68eac2640d6579d16f222fe58c1eb669342a1f2 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveDashboard.TelemetryListenerTest do
use ExUnit.Case, async: true
import Telemetry.Metrics
alias Phoenix.LiveDashboard.TelemetryListener
test "forwards the given metrics" do
time = System.system_time(:second)
TelemetryListener.listen(node(), [counter("a.b.c"), counter("d.e.g")])
:telemetry.execute([:a, :b], %{c: 100}, %{})
assert_receive {:telemetry, [{0, nil, 100, event_time}]} when event_time >= time
:telemetry.execute([:d, :e], %{g: 200}, %{})
assert_receive {:telemetry, [{1, nil, 200, event_time}]} when event_time >= time
end
test "does not forward the metric if measurement is missing" do
TelemetryListener.listen(node(), [counter("a.b.c")])
:telemetry.execute([:a, :b], %{d: 100}, %{})
:telemetry.execute([:a, :b], %{c: 200}, %{})
assert_receive {:telemetry, [{0, nil, 200, _}]}
refute_received {:telemetry, [{0, nil, 100, _}]}
end
test "does not forward the metric if skipping missing" do
TelemetryListener.listen(node(), [counter("a.b.c", keep: & &1.keep?)])
:telemetry.execute([:a, :b], %{c: 200}, %{keep?: false})
:telemetry.execute([:a, :b], %{c: 100}, %{keep?: true})
assert_receive {:telemetry, [{0, nil, 100, _}]}
refute_received {:telemetry, [{0, nil, 200, _}]}
end
test "uses custom measurement" do
TelemetryListener.listen(node(), [counter("a.b.c", measurement: &(&1.c * 2))])
:telemetry.execute([:a, :b], %{c: 100}, %{})
assert_receive {:telemetry, [{0, nil, 200, _}]}
end
test "uses custom measurement with measurement + metadata" do
TelemetryListener.listen(node(), [counter("a.b.c", measurement: &(&1.c + &2.d))])
:telemetry.execute([:a, :b], %{c: 100}, %{d: 200})
assert_receive {:telemetry, [{0, nil, 300, _}]}
end
test "converts tags and tag values to labels" do
metric =
counter("a.b.c", tag_values: &Map.put(&1, :extra, :tag), tags: [:given, :extra, :unknown])
TelemetryListener.listen(node(), [metric])
:telemetry.execute([:a, :b], %{c: 100}, %{given: "given"})
assert_receive {:telemetry, [{0, "given tag", 100, _}]}
end
test "groups by event name" do
TelemetryListener.listen(node(), [counter("a.b.c"), counter("a.b.d")])
:telemetry.execute([:a, :b], %{c: 100, d: 200}, %{})
assert_receive {:telemetry, [{0, nil, 100, time}, {1, nil, 200, time}]}
end
test "stops if the receiver process terminates" do
parent = self()
Task.start_link(fn ->
{:ok, pid} = TelemetryListener.listen(node(), [counter("a.b.c")])
send(parent, {:listener, pid})
end)
assert_receive {:listener, pid}
ref = Process.monitor(pid)
assert_receive {:DOWN, ^ref, _, _, _}
end
end
| 35.710526 | 96 | 0.616433 |
ff15ce20d48380be128b7dcec3087fa205800278 | 3,206 | exs | Elixir | mix.exs | nicolasblanco/phoenix_live_view | f2bd37302f704fca3c1e4f8db7a1cbf8ddf012f2 | [
"MIT"
] | null | null | null | mix.exs | nicolasblanco/phoenix_live_view | f2bd37302f704fca3c1e4f8db7a1cbf8ddf012f2 | [
"MIT"
] | null | null | null | mix.exs | nicolasblanco/phoenix_live_view | f2bd37302f704fca3c1e4f8db7a1cbf8ddf012f2 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.MixProject do
use Mix.Project
@version "0.15.2"
  def project do
    [
      app: :phoenix_live_view,
      version: @version,
      elixir: "~> 1.7",
      start_permanent: Mix.env() == :prod,
      # Test env compiles extra paths and the :phoenix compiler (see below).
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: compilers(Mix.env()),
      package: package(),
      # Floki is test-only, so exclude it from xref checks.
      xref: [exclude: [Floki]],
      deps: deps(),
      docs: docs(),
      name: "Phoenix LiveView",
      homepage_url: "http://www.phoenixframework.org",
      description: """
      Rich, real-time user experiences with server-rendered HTML
      """
    ]
  end
defp compilers(:test), do: [:phoenix] ++ Mix.compilers()
defp compilers(_), do: Mix.compilers()
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
def application do
[
extra_applications: [:logger],
mod: {Phoenix.LiveView.Application, []}
]
end
  defp deps do
    [
      {:phoenix, "~> 1.5.7"},
      {:phoenix_html, "~> 2.14"},
      {:telemetry, "~> 0.4.2 or ~> 0.5"},
      # Optional: only needed when the host app encodes JSON.
      {:jason, "~> 1.0", optional: true},
      # Docs and test-only tooling.
      {:ex_doc, "~> 0.22", only: :docs},
      {:floki, "~> 0.27.0", only: :test},
      {:html_entities, ">= 0.0.0", only: :test}
    ]
  end
  # ExDoc configuration; guide pages and module groupings are defined below.
  defp docs do
    [
      main: "Phoenix.LiveView",
      source_ref: "v#{@version}",
      source_url: "https://github.com/phoenixframework/phoenix_live_view",
      extra_section: "GUIDES",
      extras: extras(),
      groups_for_extras: groups_for_extras(),
      groups_for_modules: groups_for_modules()
    ]
  end
  # Markdown guides bundled into the generated docs.
  defp extras do
    [
      "guides/introduction/installation.md",
      "guides/client/bindings.md",
      "guides/client/form-bindings.md",
      "guides/client/dom-patching.md",
      "guides/client/js-interop.md",
      "guides/client/uploads-external.md",
      "guides/server/assigns-eex.md",
      "guides/server/error-handling.md",
      "guides/server/live-layouts.md",
      "guides/server/live-navigation.md",
      "guides/server/security-model.md",
      "guides/server/telemetry.md",
      "guides/server/uploads.md",
      "guides/server/using-gettext.md"
    ]
  end
  # Group the guide pages by their directory.
  defp groups_for_extras do
    [
      Introduction: ~r/guides\/introduction\/.?/,
      "Server-side features": ~r/guides\/server\/.?/,
      "Client-side integration": ~r/guides\/client\/.?/
    ]
  end

  # Sidebar groupings for related modules in the generated docs.
  defp groups_for_modules do
    [
      "Upload structures": [
        Phoenix.LiveView.UploadConfig,
        Phoenix.LiveView.UploadEntry
      ],
      "Testing structures": [
        Phoenix.LiveViewTest.Element,
        Phoenix.LiveViewTest.Upload,
        Phoenix.LiveViewTest.View
      ],
      "Live EEx Engine": [
        Phoenix.LiveView.Engine,
        Phoenix.LiveView.Component,
        Phoenix.LiveView.Rendered,
        Phoenix.LiveView.Comprehension
      ]
    ]
  end

  # Hex package metadata; `files` controls what ships in the package.
  defp package do
    [
      maintainers: ["Chris McCord", "José Valim", "Gary Rennie", "Alex Garibay", "Scott Newcomer"],
      licenses: ["MIT"],
      links: %{github: "https://github.com/phoenixframework/phoenix_live_view"},
      files:
        ~w(assets/js lib priv) ++
          ~w(CHANGELOG.md LICENSE.md mix.exs package.json README.md)
    ]
  end
end
| 26.495868 | 99 | 0.595446 |
ff15f7dfd2558076d5bc3934248dad5b8a9970ff | 2,346 | exs | Elixir | config/test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | config/test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | config/test.exs | pwjablonski/remote_retro | 1b6acf359b362ae2ec187cfbd1a38dc8266aa546 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :remote_retro, RemoteRetroWeb.Endpoint,
http: [port: 4001],
server: true
config :remote_retro, :sql_sandbox, true
config :wallaby, screenshot_on_failure: true
config :wallaby, driver: Wallaby.Experimental.Chrome
config :bamboo, :refute_timeout, 10
{:ok, file} = File.open("browser_logs.log", [:write])
Application.put_env(:wallaby, :js_logger, file)
# Print only warnings and errors during test
config :logger, level: :warn
# allow test users to authenticate
config :remote_retro, :auth_controller, RemoteRetroWeb.MockAuthController
config :honeybadger,
environment_name: :test
config :remote_retro, RemoteRetro.Mailer, adapter: Bamboo.TestAdapter
# Configure your database
config :remote_retro, RemoteRetro.Repo,
username: "postgres",
password: "postgres",
database: "remote_retro_test",
hostname: "localhost",
ownership_timeout: 60_000,
pool: Ecto.Adapters.SQL.Sandbox
config :remote_retro, :oauth_client, RemoteRetro.OAuth.Client.InMemory
config :remote_retro, :allow_user_masquerade, true
config :remote_retro, :mock_google_user_info, %{
"email" => "mrtestuser@one.com",
"email_verified" => "true",
"family_name" => "User",
"gender" => "male",
"given_name" => "Test",
"kind" => "plus#personOpenIdConnect",
"locale" => "en",
"name" => "Test User",
"picture" => "https://lh3.googleusercontent.com/a-/AAuE7mBrZMjcPzGHlf3FroPgxpVoVxt33dY3L8l8o4ncoj1GgIDVGMvtPn8Zvz26oo0CFAbmI5gPSEJzJrK9Nxma-6_Qhop6u-1JK8_-K3LLtLj1ZDic6xx9YeGUDsHEF3VrjqzSvoQWkIsEHVsUnTOogh4EuVUMSB-8ftR-bjfZROHxy4Py_4WAn773RKF9ZTbpb7ajTHkBwS7o5GF3riAPEJ9f3XUD9dASlSpRjYq7_hWzXPSAQ3on-A16bKUtily8RprssgIAc63D21XxYqkulhXhUDgDjVMVhhXmgCj1rwzBV_jd8CCJlQx7dJ4Tn5gl2Ur00TFmrKIx0-1FDE8Kiiu6wRmf7rXEFN450zW0PqRjkttiYQj3HdbiPfFOVqvlKcp_4I9o9NwqbdQWhyO_cvAhCAa9B8s8vSc5Dtg5qfA389KnRJu9hPYPhYsUc1bFSLebKG-VKUPhyzMue8Q5pTWeystzI6Zs_ALxpbobS7wPRBE_s48pV5vFWbumWTeRxc00rvINs88unMwsMS8Vet4LfvEdIm00mp5aePI73hLQXVzKI0o4XTMmKKGOM2WhZxag3WVvjMZAfGExvScPNerOyK3pCNK4RLI2DQMfvSIJENlX155kmqYafB7-bJ_pzlDdbZaKA6ShGam9UMuKnIeAHBbKW3c-9blUIu4d92fOMfLWFASzz3YSC9p5OUCe_3wexVQ9NSMhwWNr0LDQ04gxt2X3AOoaLyYH_t8H1mnBW0z0jRC0FxmK5-r-xLY",
"profile" => "https://plus.google.com/108658712426577966861",
"sub" => "108658712426577966861",
}
| 43.444444 | 826 | 0.817136 |
ff15f84400130243a951740ad654073a38203fc1 | 1,823 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/examples/routing/tcp.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | null | null | null | implementations/elixir/ockam/ockam/lib/ockam/examples/routing/tcp.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | 10 | 2022-02-23T06:44:58.000Z | 2022-03-23T21:04:37.000Z | implementations/elixir/ockam/ockam/lib/ockam/examples/routing/tcp.ex | twittner/ockam | 96eadf99da42f7c35539c6e29010a657c579ccba | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Examples.Routing.TCP do
@moduledoc """
Example message routing through the TCP transport
server() - start TCP server with echoer
server(port) - start TCP server on port
client() - establish a connection and send a message to the echoer and wait for response
client(host, port) - connect to transport on host:port and send a message and wait for response
send_and_wait(client, message) - send message to existing client connection and wait for response
"""
alias Ockam.Examples.Echoer
alias Ockam.Transport.TCP
alias Ockam.Transport.TCPAddress
require Logger
# Starts the server side of the example: a TCP listener on `port` plus an
# "echoer" worker registered under the address "echoer".
# Returns the result of `Echoer.create/1` (presumably `{:ok, _}` — TODO
# confirm against Ockam.Examples.Echoer).
def server(port \\ 4000) do
## Start a transport with listener on port
TCP.start(listen: [port: port])
Echoer.create(address: "echoer")
end
# Connects to a server started with `server/1`, sends one message to its
# "echoer" worker, and blocks until the echoed copy comes back.
# Returns `{:ok, tcp_client}` where `tcp_client` is the transport-side
# address taken from the reply's return route.
def client(host \\ "localhost", port \\ 4000) do
  # The client needs the transport running, but no listening socket.
  TCP.start()

  destination = TCPAddress.new(host, port)

  # Register the current process under a well-known address so the echoer's
  # reply can be routed back to us.
  reply_address = "example_run"
  Ockam.Node.register_address(reply_address)

  message = %{
    onward_route: [destination, "echoer"],
    payload: "Hello localhost!",
    return_route: [reply_address]
  }

  Ockam.Router.route(message)

  # Block until the echoed payload arrives; no timeout — this is example code.
  receive do
    %{onward_route: [^reply_address], return_route: [tcp_client | _], payload: "Hello localhost!"} =
        reply ->
      Logger.info("Received message: #{inspect(reply)}")
      {:ok, tcp_client}
  end
end
# Sends `message` to the echoer through an existing `client` connection and
# blocks until the identical payload is routed back to `return_address`.
def send_and_wait(client, message, return_address \\ "example_run") do
  envelope = %{
    onward_route: [client, "echoer"],
    payload: message,
    return_route: [return_address]
  }

  Ockam.Router.route(envelope)

  # Pin both the address and the payload so only the matching echo is consumed.
  receive do
    %{onward_route: [^return_address], payload: ^message} = reply ->
      Logger.info("Received message: #{inspect(reply)}")
      :ok
  end
end
end
| 28.936508 | 99 | 0.676906 |
ff16034665b85433208285a8a65dc16c4c25da1c | 4,456 | ex | Elixir | lib/vintage_net/resolver/resolv_conf.ex | fhunleth/vintage_net | 215495533cb642eeb172daba08208a454f19b36f | [
"Apache-2.0"
] | 1 | 2019-05-08T19:09:17.000Z | 2019-05-08T19:09:17.000Z | lib/vintage_net/resolver/resolv_conf.ex | fhunleth/vintage_net | 215495533cb642eeb172daba08208a454f19b36f | [
"Apache-2.0"
] | null | null | null | lib/vintage_net/resolver/resolv_conf.ex | fhunleth/vintage_net | 215495533cb642eeb172daba08208a454f19b36f | [
"Apache-2.0"
] | null | null | null | defmodule VintageNet.Resolver.ResolvConf do
@moduledoc """
Utilities for creating resolv.conf file contents
"""
alias VintageNet.IP
# Convert name resolver configurations into
# /etc/resolv.conf contents
@typedoc "Name resolver settings for an interface"
@type entry :: %{
priority: integer(),
domain: String.t(),
name_servers: [:inet.ip_address()]
}
@typedoc "All entries"
@type entry_map :: %{VintageNet.ifname() => entry()}
@type additional_name_servers :: [:inet.ip_address()]
@type name_server_info :: %{address: :inet.ip_address(), from: [:global | VintageNet.ifname()]}
@doc """
Convert the name server information to resolv.conf contents
"""
@spec to_config(entry_map(), additional_name_servers()) :: iolist()
def to_config(entries, additional_name_servers) do
  # Gather "search" domains per interface and the deduplicated server list.
  search_domains = Enum.reduce(entries, %{}, &add_domain/2)
  servers = to_name_server_list(entries, additional_name_servers)

  header = "# This file is managed by VintageNet. Do not edit.\n\n"

  [
    header,
    Enum.map(search_domains, &domain_text/1),
    Enum.map(servers, &name_server_text/1)
  ]
end
@spec to_name_server_list(entry_map(), additional_name_servers) :: [name_server_info()]
def to_name_server_list(entries, additional_name_servers) do
  # Ordering must be deterministic:
  #   1. no duplicate entries (interfaces often supply the same servers),
  #   2. keep each interface's supplied order where possible (two interfaces
  #      may disagree on order — we do not try to fix that),
  #   3. global entries always come first.
  entries
  |> Enum.reduce(%{}, &add_name_servers(&2, &1))
  |> add_name_servers({:global, %{name_servers: additional_name_servers}})
  |> Enum.map(&sort_ifname_lists/1)
  |> Enum.sort(&name_server_lte/2)
  |> Enum.map(fn {address, sources} ->
    %{address: address, from: Enum.map(sources, &elem(&1, 0))}
  end)
end
# Accumulates `domain => [ifname, ...]`; entries without a non-empty binary
# domain are skipped.
defp add_domain({ifname, %{domain: domain}}, acc) when is_binary(domain) and domain != "" do
  Map.update(acc, domain, [ifname], &[ifname | &1])
end

defp add_domain(_entry_without_domain, acc), do: acc
# Folds one interface's server list into the accumulator, tagging each server
# with its position so the original order can be reconstructed later.
defp add_name_servers(acc, {ifname, %{name_servers: servers}}) do
  servers
  |> Enum.with_index()
  |> Enum.reduce(acc, &add_name_server(ifname, &1, &2))
end
# Records that `ifname` lists `server` at position `index`.
defp add_name_server(ifname, {server, index}, acc) do
  tagged = {ifname, index}
  Map.update(acc, server, [tagged], &[tagged | &1])
end
# Sorts the {ifname, index} tuples attached to one server address.
defp sort_ifname_lists({address, ifname_index_list}) do
  {address, Enum.sort(ifname_index_list, &ifname_ix_compare/2)}
end
# Comparator for {ifname, index} tuples: same interface orders by index,
# :global sorts before any interface, otherwise interfaces order by name.
# Clause order matters — the same-ifname clause must be tried first.
defp ifname_ix_compare({name, left_ix}, {name, right_ix}), do: left_ix <= right_ix
defp ifname_ix_compare({:global, _left_ix}, _other), do: true
defp ifname_ix_compare(_other, {:global, _right_ix}), do: false
defp ifname_ix_compare({left_name, _}, {right_name, _}), do: left_name <= right_name
# "Less than or equal" comparator used by Enum.sort/2 over
# {address, ifname_index_list} entries. Compares the positions the two
# servers have in `ifname`'s list; when neither entry mentions `ifname`,
# it recurses with an ifname that is guaranteed to appear in entry `a`.
defp name_server_lte(
{_ns1, ifname_index_list1} = a,
{_ns2, ifname_index_list2} = b,
ifname \\ :global
) do
index1 = find_ifname_index(ifname_index_list1, ifname)
index2 = find_ifname_index(ifname_index_list2, ifname)
case {index1, index2} do
{nil, nil} ->
# The lists are sorted by this point so arbitrarily pick
# the first one's list to choose the ifname to compare.
# Note that the recursive call is guaranteed not to hit
# this case again since index1 will be found.
[{first_ifname, _index} | _] = ifname_index_list1
name_server_lte(a, b, first_ifname)
{nil, _not_nil} ->
# Only `b` is listed for `ifname`, so `b` sorts first.
false
{_not_nil, nil} ->
# Only `a` is listed for `ifname`, so `a` sorts first.
true
{_not_nil, _not_nil2} ->
index1 <= index2
end
end
# Returns the index paired with `target` in a [{ifname, index}] list,
# or nil when the interface is not present.
defp find_ifname_index([], _target), do: nil
defp find_ifname_index([{target, index} | _], target), do: index
defp find_ifname_index([_skip | rest], target), do: find_ifname_index(rest, target)
# Renders one "search" line, annotated with the interfaces that supplied it.
defp domain_text({domain, ifnames}) do
  ["search ", domain, " # From ", Enum.join(ifnames, ","), "\n"]
end

# Renders one "nameserver" line, annotated with its source interfaces.
defp name_server_text(%{address: address, from: ifnames}) do
  ["nameserver ", IP.ip_to_string(address), " # From ", Enum.join(ifnames, ","), "\n"]
end
end
| 36.826446 | 101 | 0.675943 |
ff165148b41c03ab67fdc6355b9aac3d7db020ed | 15,270 | ex | Elixir | lib/phoenix_live_view/router.ex | tomconroy/phoenix_live_view | 655e95170e3e1b8fead1ffe43a661c7f5aa6b519 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/router.ex | tomconroy/phoenix_live_view | 655e95170e3e1b8fead1ffe43a661c7f5aa6b519 | [
"MIT"
] | 4 | 2021-03-04T13:00:52.000Z | 2021-03-12T12:42:09.000Z | deps/phoenix_live_view/lib/phoenix_live_view/router.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.Router do
@moduledoc """
Provides LiveView routing for Phoenix routers.
"""
@cookie_key "__phoenix_flash__"
@doc """
Defines a LiveView route.
A LiveView can be routed to by using the `live` macro with a path and
the name of the LiveView:
live "/thermostat", ThermostatLive
By default, you can generate a route to this LiveView by using the `live_path` helper:
live_path(@socket, ThermostatLive)
## Actions and live navigation
It is common for a LiveView to have multiple states and multiple URLs.
For example, you can have a single LiveView that lists all articles on
your web app. For each article there is an "Edit" button which, when
pressed, opens up a modal on the same page to edit the article. It is a
best practice to use live navigation in those cases, so when you click
edit, the URL changes to "/articles/1/edit", even though you are still
within the same LiveView. Similarly, you may also want to show a "New"
button, which opens up the modal to create new entries, and you want
this to be reflected in the URL as "/articles/new".
In order to make it easier to recognize the current "action" your
LiveView is on, you can pass the action option when defining LiveViews
too:
live "/articles", ArticleLive.Index, :index
live "/articles/new", ArticleLive.Index, :new
live "/articles/:id/edit", ArticleLive.Index, :edit
When an action is given, the generated route helpers are named after
the LiveView itself (in the same way as for a controller). For the example
above, we will have:
article_index_path(@socket, :index)
article_index_path(@socket, :new)
article_index_path(@socket, :edit, 123)
The current action will always be available inside the LiveView as
the `@live_action` assign, that can be used to render a LiveComponent:
<%= if @live_action == :new do %>
<%= live_component MyAppWeb.ArticleLive.FormComponent %>
<% end %>
Or can be used to show or hide parts of the template:
<%= if @live_action == :edit do %>
<%= render("form.html", user: @user) %>
<% end %>
Note that `@live_action` will be `nil` if no action is given on the route definition.
## Options
* `:container` - an optional tuple for the HTML tag and DOM attributes to
be used for the LiveView container. For example: `{:li, style: "color: blue;"}`.
See `Phoenix.LiveView.Helpers.live_render/3` for more information and examples.
* `:as` - optionally configures the named helper. Defaults to `:live` when
using a LiveView without actions or defaults to the LiveView name when using
actions.
* `:metadata` - a map to optional feed metadata used on telemetry events and route info,
for example: `%{route_name: :foo, access: :user}`.
* `:private` - an optional map of private data to put in the plug connection.
for example: `%{route_name: :foo, access: :user}`.
## Examples
defmodule MyApp.Router
use Phoenix.Router
import Phoenix.LiveView.Router
scope "/", MyApp do
pipe_through [:browser]
live "/thermostat", ThermostatLive
live "/clock", ClockLive
live "/dashboard", DashboardLive, container: {:main, class: "row"}
end
end
iex> MyApp.Router.Helpers.live_path(MyApp.Endpoint, MyApp.ThermostatLive)
"/thermostat"
"""
# Expands to a Phoenix.Router.get/4 route served by Phoenix.LiveView.Plug,
# with the LiveView data computed by `__live__/4`.
defmacro live(path, live_view, action \\ nil, opts \\ []) do
# `bind_quoted: binding()` re-binds all four macro arguments inside the
# quoted block so each is evaluated exactly once in the caller's router.
quote bind_quoted: binding() do
{action, router_options} =
Phoenix.LiveView.Router.__live__(__MODULE__, live_view, action, opts)
Phoenix.Router.get(path, Phoenix.LiveView.Plug, action, router_options)
end
end
@doc """
Defines a live session for live redirects within a group of live routes.
`live_session/3` allow routes defined with `live/4` to support
`live_redirect` from the client with navigation purely over the existing
websocket connection. This allows live routes defined in the router to
mount a new root LiveView without additional HTTP requests to the server.
## Security Considerations
You must always perform authentication and authorization in your LiveViews.
If your application handle both regular HTTP requests and LiveViews, then
you must perform authentication and authorization on both. This is important
because `live_redirect`s *do not go through the plug pipeline*.
`live_session` can be used to draw boundaries between groups of LiveViews.
Redirecting between `live_session`s will always force a full page reload
and establish a brand new LiveView connection. This is useful when LiveViews
require different authentication strategies or simply when they use different
root layouts (as the root layout is not updated between live redirects).
Please [read our guide on the security model](security-model.md) for a
detailed description and general tips on authentication, authorization,
and more.
## Options
* `:session` - The optional extra session map or MFA tuple to be merged with
the LiveView session. For example, `%{"admin" => true}`, `{MyMod, :session, []}`.
For MFA, the function is invoked, passing the `Plug.Conn` struct is prepended
to the arguments list.
* `:root_layout` - The optional root layout tuple for the initial HTTP render to
override any existing root layout set in the router.
* `:on_mount` - The optional list of hooks to attach to the mount lifecycle _of
each LiveView in the session_. See `Phoenix.LiveView.on_mount/1`. Passing a
single value is also accepted.
## Examples
scope "/", MyAppWeb do
pipe_through :browser
live_session :default do
live "/feed", FeedLive, :index
live "/status", StatusLive, :index
live "/status/:id", StatusLive, :show
end
live_session :admin, on_mount: MyAppWeb.AdminLiveAuth do
live "/admin", AdminDashboardLive, :index
live "/admin/posts", AdminPostLive, :index
end
end
In the example above, we have two live sessions. Live navigation between live views
in the different sessions is not possible and will always require a full page reload.
This is important in the example above because the `:admin` live session has authentication
requirements, defined by `on_mount: MyAppWeb.AdminLiveAuth`, that the other LiveViews
do not have.
If you have both regular HTTP routes (via get, post, etc) and `live` routes, then
you need to perform the same authentication and authorization rules in both.
For example, if you were to add a `get "/admin/health"` entry point inside the
`:admin` live session above, then you must create your own plug that performs the
same authentication and authorization rules as `MyAppWeb.AdminLiveAuth`, and then
pipe through it:
live_session :admin, on_mount: MyAppWeb.AdminLiveAuth do
# Regular routes
pipe_through [MyAppWeb.AdminPlugAuth]
get "/admin/health"
# Live routes
live "/admin", AdminDashboardLive, :index
live "/admin/posts", AdminPostLive, :index
end
The opposite is also true, if you have regular http routes and you want to
add your own `live` routes, the same authentication and authorization checks
executed by the plugs listed in `pipe_through` must be ported to LiveViews
and be executed via `on_mount` hooks.
"""
defmacro live_session(name, opts \\ [], do: block) do
# When the options are a compile-time literal, expand any aliases inside
# them now (in the caller's env) so fully-qualified names are stored.
opts =
if Macro.quoted_literal?(opts) do
Macro.prewalk(opts, &expand_alias(&1, __CALLER__))
else
opts
end
quote do
# Record the session, compile the inner `live/4` routes, then clear the
# "current session" marker so later routes are not pulled into it.
unquote(__MODULE__).__live_session__(__MODULE__, unquote(opts), unquote(name))
unquote(block)
Module.delete_attribute(__MODULE__, :phoenix_live_session_current)
end
end
# Expands alias nodes found in quoted `live_session` options, with the env's
# function set to {:mount, 3}; any other node is passed through unchanged.
defp expand_alias(node, env) do
  case node do
    {:__aliases__, _, _} = aliased -> Macro.expand(aliased, %{env | function: {:mount, 3}})
    other -> other
  end
end
@doc false
# Compile-time bookkeeping for `live_session/3`: validates the options, then
# records the session both as the "current" one (so `live/4` routes inside
# the block can find it) and in the accumulated list used for duplicate checks.
def __live_session__(module, opts, name) do
Module.register_attribute(module, :phoenix_live_sessions, accumulate: true)
vsn = session_vsn(module)
unless is_atom(name) do
raise ArgumentError, """
expected live_session name to be an atom, got: #{inspect(name)}
"""
end
extra = validate_live_session_opts(opts, module, name)
# A non-nil "current" session means this call is nested inside another block.
if nested = Module.get_attribute(module, :phoenix_live_session_current) do
raise """
attempting to define live_session #{inspect(name)} inside #{inspect(nested.name)}.
live_session definitions cannot be nested.
"""
end
live_sessions = Module.get_attribute(module, :phoenix_live_sessions)
existing = Enum.find(live_sessions, fn %{name: existing_name} -> name == existing_name end)
if existing do
raise """
attempting to redefine live_session #{inspect(name)}.
live_session routes must be declared in a single named block.
"""
end
Module.put_attribute(module, :phoenix_live_session_current, %{name: name, extra: extra, vsn: vsn})
Module.put_attribute(module, :phoenix_live_sessions, %{name: name, extra: extra, vsn: vsn})
end
@live_session_opts [:on_mount, :root_layout, :session]
# Normalizes the keyword list given to `live_session/3` into a plain map,
# raising ArgumentError on any unknown or malformed option.
defp validate_live_session_opts(opts, module, _name) when is_list(opts) do
opts
|> Keyword.put_new(:session, %{})
|> Enum.reduce(%{}, fn
# :session accepts a map or a 3-element MFA tuple.
{:session, val}, acc when is_map(val) or (is_tuple(val) and tuple_size(val) == 3) ->
Map.put(acc, :session, val)
{:session, bad_session}, _acc ->
raise ArgumentError, """
invalid live_session :session
expected a map with string keys or an MFA tuple, got #{inspect(bad_session)}
"""
# :root_layout accepts {Mod, "template"}, {Mod, :template} (".html" is
# appended), or `false` to disable the root layout.
{:root_layout, {mod, template}}, acc when is_atom(mod) and is_binary(template) ->
Map.put(acc, :root_layout, {mod, template})
{:root_layout, {mod, template}}, acc when is_atom(mod) and is_atom(template) ->
Map.put(acc, :root_layout, {mod, "#{template}.html"})
{:root_layout, false}, acc ->
Map.put(acc, :root_layout, false)
{:root_layout, bad_layout}, _acc ->
raise ArgumentError, """
invalid live_session :root_layout
expected a tuple with the view module and template string or atom name, got #{inspect(bad_layout)}
"""
# :on_mount accepts a single hook or a list; each entry is normalized via
# Phoenix.LiveView.Lifecycle.on_mount/2.
{:on_mount, on_mount}, acc ->
hooks = Enum.map(List.wrap(on_mount), &Phoenix.LiveView.Lifecycle.on_mount(module, &1))
Map.put(acc, :on_mount, hooks)
{key, _val}, _acc ->
raise ArgumentError, """
unknown live_session option "#{inspect(key)}"
Supported options include: #{inspect(@live_session_opts)}
"""
end)
end
# Anything other than a keyword list is always an error.
defp validate_live_session_opts(invalid, _module, name) do
raise ArgumentError, """
expected second argument to live_session to be a list of options, got:
live_session #{inspect(name)}, #{inspect(invalid)}
"""
end
@doc """
Fetches the LiveView and merges with the controller flash.
Replaces the default `:fetch_flash` plug used by `Phoenix.Router`.
## Examples
defmodule AppWeb.Router do
use LiveGenWeb, :router
import Phoenix.LiveView.Router
pipeline :browser do
...
plug :fetch_live_flash
end
...
end
"""
def fetch_live_flash(%Plug.Conn{} = conn, _opts) do
  # `cookie_flash/1` returns {conn, flash_map} or {conn, nil} when there was
  # no LiveView flash cookie to merge.
  {conn, flash_from_cookie} = cookie_flash(conn)
  conn = Phoenix.Controller.fetch_flash(conn, [])

  if flash_from_cookie do
    Phoenix.Controller.merge_flash(conn, flash_from_cookie)
  else
    conn
  end
end
@doc false
# Clause for `live path, LiveView, opts` with no action: the third argument
# is actually the options list, so re-dispatch with action = nil.
def __live__(router, live_view, action, opts)
when is_list(action) and is_list(opts) do
__live__(router, live_view, nil, Keyword.merge(action, opts))
end
# Builds the {plug_action, route_options} pair consumed by
# Phoenix.Router.get/4 inside the `live/4` macro.
def __live__(router, live_view, action, opts)
when is_atom(action) and is_list(opts) do
# Routes defined outside any live_session block fall into :default.
live_session =
Module.get_attribute(router, :phoenix_live_session_current) ||
%{name: :default, extra: %{session: %{}}, vsn: session_vsn(router)}
# Resolve the LiveView module against the current router scope aliases.
live_view = Phoenix.Router.scoped_alias(router, live_view)
{private, metadata, opts} = validate_live_opts!(opts)
opts =
opts
|> Keyword.put(:router, router)
|> Keyword.put(:action, action)
{as_helper, as_action} = inferred_as(live_view, opts[:as], action)
# The LiveView tuple is stashed in both :private (for the plug) and
# :metadata (for telemetry/route introspection).
{as_action,
alias: false,
as: as_helper,
private: Map.put(private, :phoenix_live_view, {live_view, opts, live_session}),
metadata: Map.put(metadata, :phoenix_live_view, {live_view, action, opts, live_session})}
end
# Splits the :private and :metadata maps out of the route options and
# validates every remaining option, raising ArgumentError on bad input.
# Returns {private_map, metadata_map, remaining_opts}.
defp validate_live_opts!(opts) do
{private, opts} = Keyword.pop(opts, :private, %{})
{metadata, opts} = Keyword.pop(opts, :metadata, %{})
Enum.each(opts, fn
# :container must be {tag_atom, attrs_list}.
{:container, {tag, attrs}} when is_atom(tag) and is_list(attrs) ->
:ok
{:container, val} ->
raise ArgumentError, """
expected live :container to be a tuple matching {atom, attrs :: list}, got: #{inspect(val)}
"""
{:as, as} when is_atom(as) ->
:ok
{:as, bad_val} ->
raise ArgumentError, """
expected live :as to be an atom, got: #{inspect(bad_val)}
"""
# Duplicate :metadata/:private occurrences (Keyword.pop removes only the
# first) are still validated here.
{key, %{} = meta} when key in [:metadata, :private] and is_map(meta) ->
:ok
{key, bad_val} when key in [:metadata, :private] ->
raise ArgumentError, """
expected live :#{key} to be a map, got: #{inspect(bad_val)}
"""
{key, val} ->
raise ArgumentError, """
unknown live option :#{key}.
Supported options include: :container, :as, :metadata, :private.
Got: #{inspect([{key, val}])}
"""
end)
{private, metadata, opts}
end
# No action given: the helper defaults to :live and the LiveView module
# itself becomes the plug "action".
defp inferred_as(live_view, as, nil), do: {as || :live, live_view}
# Action given but no explicit :as — derive the helper name from the module:
# drop leading namespaces up to the first segment ending in "Live", strip the
# "Live" suffix from each remaining segment, underscore, and join with "_"
# (e.g. MyApp.ArticleLive.Index -> :article_index).
defp inferred_as(live_view, nil, action) do
live_view
|> Module.split()
|> Enum.drop_while(&(not String.ends_with?(&1, "Live")))
|> Enum.map(&(&1 |> String.replace_suffix("Live", "") |> Macro.underscore()))
|> Enum.reject(&(&1 == ""))
|> Enum.join("_")
|> case do
"" ->
raise ArgumentError,
"could not infer :as option because a live action was given and the LiveView " <>
"does not have a \"Live\" suffix. Please pass :as explicitly or make sure your " <>
"LiveView is named like \"FooLive\" or \"FooLive.Index\""
as ->
# Atom creation is safe here: input is a compile-time module name.
{String.to_atom(as), action}
end
end
# Both :as and an action explicitly given: use them as-is.
defp inferred_as(_live_view, as, action), do: {as, action}
# Extracts and verifies the LiveView flash cookie, always deleting it from
# the response. Returns {conn, flash_map} or {conn, nil}.
defp cookie_flash(%Plug.Conn{cookies: %{@cookie_key => token}} = conn) do
  endpoint = Phoenix.Controller.endpoint_module(conn)

  # An empty verified flash is treated as no flash at all.
  flash =
    case Phoenix.LiveView.Utils.verify_flash(endpoint, token) do
      empty when empty == %{} -> nil
      %{} = verified -> verified
    end

  {Plug.Conn.delete_resp_cookie(conn, @cookie_key), flash}
end

defp cookie_flash(%Plug.Conn{} = conn), do: {conn, nil}
# Memoizes a per-router session "version" in a module attribute so every
# live session defined during the same compilation shares the same vsn.
defp session_vsn(module) do
  case Module.get_attribute(module, :phoenix_session_vsn) do
    nil ->
      vsn = System.system_time()
      Module.put_attribute(module, :phoenix_session_vsn, vsn)
      vsn

    existing_vsn ->
      existing_vsn
  end
end
end
| 34.314607 | 106 | 0.663851 |
ff16593195fe7ab635d4b6e3bdf5089be1fc3701 | 728 | ex | Elixir | lib/asciinema_web/controllers/page_controller.ex | sergey-nechaev/asciinema-server | 64f87acec9c9b45fefa8bed8f70b41f0068ccb2b | [
"Apache-2.0"
] | null | null | null | lib/asciinema_web/controllers/page_controller.ex | sergey-nechaev/asciinema-server | 64f87acec9c9b45fefa8bed8f70b41f0068ccb2b | [
"Apache-2.0"
] | null | null | null | lib/asciinema_web/controllers/page_controller.ex | sergey-nechaev/asciinema-server | 64f87acec9c9b45fefa8bed8f70b41f0068ccb2b | [
"Apache-2.0"
] | null | null | null | defmodule AsciinemaWeb.PageController do
use AsciinemaWeb, :controller
plug :put_layout, :app2
# Each action renders a static page with its title assigned for the layout.
def about(conn, _params) do
  render(assign(conn, :page_title, "About"), "about.html")
end

def privacy(conn, _params) do
  render(assign(conn, :page_title, "Privacy Policy"), "privacy.html")
end

def tos(conn, _params) do
  render(assign(conn, :page_title, "Terms of Service"), "tos.html")
end

# TODO rename to Community
def contact(conn, _params) do
  render(assign(conn, :page_title, "Contact"), "contact.html")
end

def contributing(conn, _params) do
  render(assign(conn, :page_title, "Contributing"), "contributing.html")
end
end
| 20.222222 | 56 | 0.642857 |
ff1668396fb5cc0ec8f6fc98c6ec1ebcef62a24d | 216 | exs | Elixir | test/coop_minesweeper_web/controllers/page_controller_test.exs | sdoering01/coop_minesweeper | 4fc7265c3734584f93a325c2ceaa172da578f7a5 | [
"MIT"
] | null | null | null | test/coop_minesweeper_web/controllers/page_controller_test.exs | sdoering01/coop_minesweeper | 4fc7265c3734584f93a325c2ceaa172da578f7a5 | [
"MIT"
] | null | null | null | test/coop_minesweeper_web/controllers/page_controller_test.exs | sdoering01/coop_minesweeper | 4fc7265c3734584f93a325c2ceaa172da578f7a5 | [
"MIT"
] | null | null | null | defmodule CoopMinesweeperWeb.PageControllerTest do
use CoopMinesweeperWeb.ConnCase
test "GET /", %{conn: conn} do
  # Fetch the landing page and assert the default Phoenix greeting renders.
  response =
    conn
    |> get("/")
    |> html_response(200)

  assert response =~ "Welcome to Phoenix!"
end
end
| 24 | 60 | 0.703704 |
ff166ee5135a05fe50b82585a85b68bee3eb8ca9 | 1,619 | ex | Elixir | clients/people/lib/google_api/people/v1/model/interest.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/people/lib/google_api/people/v1/model/interest.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/people/lib/google_api/people/v1/model/interest.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.People.V1.Model.Interest do
@moduledoc """
One of the person's interests.
## Attributes
* `metadata` (*type:* `GoogleApi.People.V1.Model.FieldMetadata.t`, *default:* `nil`) - Metadata about the interest.
* `value` (*type:* `String.t`, *default:* `nil`) - The interest; for example, `stargazing`.
"""
use GoogleApi.Gax.ModelBase
# Both fields are optional; nil means the API response omitted them.
@type t :: %__MODULE__{
:metadata => GoogleApi.People.V1.Model.FieldMetadata.t() | nil,
:value => String.t() | nil
}
# `field/2` (from GoogleApi.Gax.ModelBase) defines the struct field and its
# JSON (de)serialization mapping. This file is generated — do not hand-edit.
field(:metadata, as: GoogleApi.People.V1.Model.FieldMetadata)
field(:value)
end
# Poison protocol implementations delegate to the generated ModelBase
# (de)serialization for this model.
defimpl Poison.Decoder, for: GoogleApi.People.V1.Model.Interest do
def decode(value, options) do
GoogleApi.People.V1.Model.Interest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.People.V1.Model.Interest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.38 | 119 | 0.718345 |
ff167dcd36db4dcf7d65dceef7247d3331c5d5e9 | 576 | ex | Elixir | lib/epi_contacts/unlock_identity_confirmation_session_worker.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | lib/epi_contacts/unlock_identity_confirmation_session_worker.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | 13 | 2021-06-29T04:35:41.000Z | 2022-02-09T04:25:39.000Z | lib/epi_contacts/unlock_identity_confirmation_session_worker.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | defmodule EpiContacts.UnlockIdentityConfirmationSessionWorker do
@moduledoc """
Oban worker that unlocks a confirm identity
"""
use Oban.Worker, queue: :default
alias EpiContacts.Questionnaire
alias EpiContacts.Repo
# Number of seconds (15 minutes) after which the unlock job should run.
# NOTE(review): presumably passed as Oban's `schedule_in` option by the code
# that enqueues this worker — confirm against the caller.
def schedule_in do
15 * 60
end
@impl Oban.Worker
def perform(%_{args: %{"session_id" => id}}) do
  # Load the session and stamp it as unlocked now.
  changeset =
    id
    |> Questionnaire.get_identity_confirmation_session()
    |> Questionnaire.change_identity_confirmation_session(%{unlocked_at: DateTime.utc_now()})

  # Assertive match: crash (and let Oban handle the failure) if the update fails.
  {:ok, _session} = Repo.update(changeset)
  :ok
end
end
| 24 | 111 | 0.734375 |
ff16d5f4c8e76f078abe3d9cd78582e6ea22c881 | 1,092 | ex | Elixir | lib/live_view_examples_web/channels/user_socket.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
] | 219 | 2019-04-02T02:51:59.000Z | 2021-11-10T22:14:17.000Z | lib/live_view_examples_web/channels/user_socket.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
] | 1 | 2021-05-08T11:51:29.000Z | 2021-05-08T11:51:29.000Z | lib/live_view_examples_web/channels/user_socket.ex | zorbash/observer_live | f78af309a85783ac61d97ba9bb5ffd7a64ec9823 | [
"MIT"
] | 14 | 2019-04-02T07:19:38.000Z | 2021-03-27T19:19:45.000Z | defmodule LiveViewExamplesWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", LiveViewExamplesWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection as-is; no params are verified and no assigns are
# set here.
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# LiveViewExamplesWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 32.117647 | 91 | 0.704212 |
ff16ea51b00cc88f021e942559f1cb1371d071a7 | 508 | ex | Elixir | lib/exmqtt/logger.ex | kcurtin/exmqtt | 3431b560117e73c769d30d06b3feada170c3973f | [
"Apache-2.0"
] | 13 | 2020-04-12T13:24:32.000Z | 2021-08-17T00:25:09.000Z | lib/exmqtt/logger.ex | kcurtin/exmqtt | 3431b560117e73c769d30d06b3feada170c3973f | [
"Apache-2.0"
] | null | null | null | lib/exmqtt/logger.ex | kcurtin/exmqtt | 3431b560117e73c769d30d06b3feada170c3973f | [
"Apache-2.0"
] | 2 | 2020-05-14T12:43:48.000Z | 2021-03-02T17:36:24.000Z | defmodule Exmqtt.LogFormatter do
# Logger formatter callback: "<iso8601> <metadata> [<level>] <message>\n".
# Any formatting crash is caught and reported as a fallback line so the
# logger itself never dies on a bad message.
def format(level, message, timestamp, metadata) do
  try do
    "#{fmt_timestamp(timestamp)} #{inspect(metadata)} [#{level}] #{message}\n"
  rescue
    _any -> "could not format message: #{inspect({level, message, timestamp, metadata})}\n"
  end
end
# Converts a logger timestamp tuple into "YYYY-MM-DDTHH:MM:SS.mmmZ".
# On conversion failure the error tuple is returned as-is, which makes the
# caller's interpolation raise and hit its rescue clause.
# NOTE(review): the trailing "Z" labels the value as UTC — confirm the
# logger actually supplies UTC tuples in this deployment.
defp fmt_timestamp({date, {hh, mm, ss, ms}}) do
  case NaiveDateTime.from_erl({date, {hh, mm, ss}}, {ms * 1000, 3}) do
    {:ok, naive} -> "#{NaiveDateTime.to_iso8601(naive)}Z"
    error -> error
  end
end
end
| 29.882353 | 90 | 0.643701 |
ff16f1bb336ddc90207011d6859c71019f326e23 | 1,294 | exs | Elixir | test/shippo/operation/webhook_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | test/shippo/operation/webhook_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | test/shippo/operation/webhook_test.exs | christopherlai/shippo | 33d62242a5c3ad1d935888150d5cd630404d91f3 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule Shippo.Operation.WebhookTest do
use ExUnit.Case, async: true
alias Shippo.Operation
alias Shippo.Operation.Webhook
describe "create/1" do
  test "returns a Operations struct with :post and /webhooks" do
    operation = Webhook.create(%{})

    assert %Operation{http_method: :post, path: "/webhooks", data: %{}, opts: []} = operation
  end
end
describe "get/1" do
  test "returns a Operations struct with :get and /webhooks/:id" do
    operation = Webhook.get("123")

    assert %Operation{http_method: :get, path: "/webhooks/123", data: nil, opts: []} = operation
  end
end
describe "list/0" do
  test "returns a Operations struct with :get and /webhooks" do
    operation = Webhook.list()

    assert %Operation{http_method: :get, path: "/webhooks", data: nil, opts: []} = operation
  end
end
describe "update/2" do
  # Fixed typo in the test description: ":pust" -> ":put".
  test "returns a Operations struct with :put and /webhooks/:id" do
    assert %Operation{
             http_method: :put,
             path: "/webhooks/123",
             data: %{},
             opts: []
           } = Webhook.update("123", %{})
  end
end
end
| 25.88 | 70 | 0.506955 |
ff16f3f1ff4a0800c838903823673a0a4df07795 | 476 | exs | Elixir | priv/repo/migrations/20150921163146_create_user.exs | applede/media-bot | e6d2ffe59f2aaea022f1bf05eed42a4559ce3859 | [
"MIT"
] | null | null | null | priv/repo/migrations/20150921163146_create_user.exs | applede/media-bot | e6d2ffe59f2aaea022f1bf05eed42a4559ce3859 | [
"MIT"
] | null | null | null | priv/repo/migrations/20150921163146_create_user.exs | applede/media-bot | e6d2ffe59f2aaea022f1bf05eed42a4559ce3859 | [
"MIT"
] | null | null | null | defmodule MediaBot.Repo.Migrations.CreateUser do
use Ecto.Migration
def change do
create table(:users) do
add :username, :string
add :hashed_password, :string
add :hashed_confirmation_token, :string
add :confirmed_at, :datetime
add :hashed_password_reset_token, :string
add :unconfirmed_email, :string
add :authentication_tokens, {:array, :string}, default: []
end
create unique_index(:users, [:username])
end
end
| 26.444444 | 64 | 0.693277 |
ff16f9149aa1f7db41cac5ddbcec571f968974aa | 1,460 | exs | Elixir | api/config/dev.exs | andrewvmail/momoirc | 15cd76dc8b5f99f72e4a8e05a35dca9bf293bfca | [
"MIT"
] | null | null | null | api/config/dev.exs | andrewvmail/momoirc | 15cd76dc8b5f99f72e4a8e05a35dca9bf293bfca | [
"MIT"
] | null | null | null | api/config/dev.exs | andrewvmail/momoirc | 15cd76dc8b5f99f72e4a8e05a35dca9bf293bfca | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
# Dev-only endpoint settings: verbose error pages, live code reloading, and
# no origin check. None of these settings are safe for production.
config :api, ApiWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# NOTE(review): a hard-coded signer is tolerable in dev, but make sure the
# production config loads the Joken signer from the environment rather than
# committing a real secret to source control.
config :joken, default_signer: "secret"
ff17081b66a6ca6363c9c804e09b279572ae5590 | 2,122 | ex | Elixir | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/set_iam_policy_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/set_iam_policy_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_kms/lib/google_api/cloud_kms/v1/model/set_iam_policy_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CloudKMS.V1.Model.SetIamPolicyRequest do
@moduledoc """
Request message for `SetIamPolicy` method.
## Attributes
* `policy` (*type:* `GoogleApi.CloudKMS.V1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of
the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
* `updateMask` (*type:* `String.t`, *default:* `nil`) - OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only
the fields in the mask will be modified. If no mask is provided, the
following default mask is used:
paths: "bindings, etag"
This field is only used by Cloud IAM.
"""
use GoogleApi.Gax.ModelBase
# `policy` is required by the API but still typed without `| nil` here,
# matching the generator's output; do not hand-edit this generated file.
@type t :: %__MODULE__{
:policy => GoogleApi.CloudKMS.V1.Model.Policy.t(),
:updateMask => String.t()
}
# `field/2` (from GoogleApi.Gax.ModelBase) defines the struct field and its
# JSON (de)serialization mapping.
field(:policy, as: GoogleApi.CloudKMS.V1.Model.Policy)
field(:updateMask)
end
defimpl Poison.Decoder, for: GoogleApi.CloudKMS.V1.Model.SetIamPolicyRequest do
  # Decoding is model-specific: hand the raw map straight to the generated
  # model module, which knows how to map wire fields onto the struct.
  defdelegate decode(value, options), to: GoogleApi.CloudKMS.V1.Model.SetIamPolicyRequest
end
defimpl Poison.Encoder, for: GoogleApi.CloudKMS.V1.Model.SetIamPolicyRequest do
  # Encoding is uniform across all generated models, so it lives in the
  # shared Gax base module rather than in the model itself.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.22807 | 158 | 0.725259 |
ff1721104eee75b9ac7bf3724c11061f82889f6b | 815 | exs | Elixir | deps/phoenix_ecto/mix.exs | hallebadkapp/rumbl-ms | ae2ef9975658115f8c4d5c49c28d8bde00a74b83 | [
"MIT"
] | null | null | null | deps/phoenix_ecto/mix.exs | hallebadkapp/rumbl-ms | ae2ef9975658115f8c4d5c49c28d8bde00a74b83 | [
"MIT"
] | null | null | null | deps/phoenix_ecto/mix.exs | hallebadkapp/rumbl-ms | ae2ef9975658115f8c4d5c49c28d8bde00a74b83 | [
"MIT"
] | null | null | null | defmodule PhoenixEcto.Mixfile do
use Mix.Project
@version "3.0.1"
def project do
[app: :phoenix_ecto,
version: @version,
elixir: "~> 1.2",
deps: deps(),
# Hex
description: "Integration between Phoenix & Ecto",
package: package(),
# Docs
name: "Phoenix/Ecto",
docs: [source_ref: "v#{@version}",
source_url: "https://github.com/phoenixframework/phoenix_ecto"]]
end
def application do
[applications: [:logger, :ecto, :plug]]
end
defp package do
[maintainers: ["José Valim", "Chris Mccord"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/phoenixframework/phoenix_ecto"}]
end
defp deps do
[{:phoenix_html, "~> 2.6", optional: true},
{:ecto, "~> 2.0"},
{:plug, "~> 1.0"}]
end
end
| 21.447368 | 78 | 0.585276 |
ff1732e5c04c0fa6431b83da0fd51bd5842254cd | 5,408 | ex | Elixir | lib/mix/tasks/authority.gen.context.ex | infinitered/authority_ecto | 3d076b04c4ad4f5d5b89df990f91b45d1a45da19 | [
"MIT"
] | 11 | 2018-01-17T09:31:26.000Z | 2019-03-11T03:20:50.000Z | lib/mix/tasks/authority.gen.context.ex | infinitered/authority_ecto | 3d076b04c4ad4f5d5b89df990f91b45d1a45da19 | [
"MIT"
] | 24 | 2018-01-17T00:13:38.000Z | 2018-11-12T09:38:09.000Z | lib/mix/tasks/authority.gen.context.ex | infinitered/authority_ecto | 3d076b04c4ad4f5d5b89df990f91b45d1a45da19 | [
"MIT"
] | 3 | 2018-01-17T03:23:17.000Z | 2019-03-11T03:20:47.000Z | defmodule Mix.Tasks.Authority.Gen.Context do
use Mix.Task
alias Mix.Authority.Ecto.Context
@shortdoc "Generate a context with Authority"
@moduledoc """
Generates a new context with Authority ready to go.
mix authority.gen.context Accounts
The following files will be created (assuming the provided context name is `Accounts`):
* `lib/<your app>/accounts/accounts.ex`
* `test/<your app>/accounts/accounts_test.exs`
* `lib/<your app>/accounts/user.ex`
* `test/<your app>/accounts/user_test.exs`
* `lib/<your app>/accounts/token.ex`
* `test/<your app>/accounts/token_test.exs`
* `lib/<your app>/accounts/lock.ex`
* `test/<your app>/accounts/lock_test.exs`
* `lib/<your app>/accounts/attempt.ex`
* `test/<your app>/accounts/attempt_test.exs`
* `priv/repo/migrations/<timestamp>_authority_ecto.ex`
The generated files expect the following modules to exist (where
`MyApp` is the top-level namespace for your application):
* `MyApp.Repo`
* `MyApp.DataCase`
If you created your application using `mix phx.new`, these modules where
already defined for you.
## Options
* `--no-locking` - do not generate files for locking accounts
after a number of failed attempts
* `--no-recovery` - do not generate files for password resets
* `--no-tokenization` - do not generate files for creating tokens. When
choosing this option, you must also provide `--no-recovery`
* `--no-registration` - do not generate files for creating/updating users
"""
@context_template "priv/templates/authority.gen.context"
@switches [
tokenization: :boolean,
locking: :boolean,
recovery: :boolean,
registration: :boolean
]
@default_options [
authentication: true,
tokenization: true,
recovery: true,
registration: true,
locking: true
]
@doc """
Generate a new context with Authority.Ecto.Template preconfigured.
"""
def run([name | args]) do
context = Context.new(name)
{files, behaviours, config} = build_features(context, args)
binding = [
context: context,
config: config,
behaviours: Enum.sort_by(behaviours, &behaviour_order/1)
]
for {source, target} <- files do
Mix.Generator.create_file(target, render(source, binding))
end
Mix.shell().info("""
Remember to update your repository by running migrations:
$ mix ecto.migrate
Add a secret to your configuration for storing tokens:
config #{inspect(context.otp_app)}, #{inspect(context.token.module)}.HMAC,
secret_key: "some secure value"
""")
end
def run(args) do
Mix.raise(
"expected authority.gen.migration to receive a name for the new context, " <>
"got: #{inspect(Enum.join(args, " "))}"
)
end
defp render(source, binding) do
:authority_ecto
|> Application.app_dir(Path.join(@context_template, source))
|> EEx.eval_file(binding)
end
defp build_features(context, args) do
{options, _, _} = OptionParser.parse(args, switches: @switches)
@default_options
|> Keyword.merge(options)
|> Enum.reduce({[], [], []}, &build_feature(&2, context, &1))
end
defp build_feature(spec, context, {:authentication, true}) do
spec
|> put_file("migration.exs", context.migration.file)
|> put_file("context.ex", context.file)
|> put_file("context_test.exs", context.test_file)
|> put_file("user.ex", context.user.file)
|> put_file("user_test.exs", context.user.test_file)
|> put_behaviour(Authority.Authentication)
|> put_config(:repo, context.repo)
|> put_config(:user_schema, context.user.module)
end
defp build_feature(spec, _context, {:registration, true}) do
put_behaviour(spec, Authority.Registration)
end
defp build_feature(spec, context, {:recovery, true}) do
spec
|> put_behaviour(Authority.Recovery)
|> put_config(:recovery_callback, {context.module, :send_forgot_password_email})
end
defp build_feature(spec, context, {:tokenization, true}) do
spec
|> put_file("token.ex", context.token.file)
|> put_file("token_test.exs", context.token.test_file)
|> put_behaviour(Authority.Tokenization)
|> put_config(:token_schema, context.token.module)
end
defp build_feature(spec, context, {:locking, true}) do
spec
|> put_file("lock.ex", context.lock.file)
|> put_file("lock_test.exs", context.lock.test_file)
|> put_file("attempt.ex", context.attempt.file)
|> put_file("attempt_test.exs", context.attempt.test_file)
|> put_behaviour(Authority.Locking)
|> put_config(:lock_schema, context.lock.module)
|> put_config(:lock_attempt_schema, context.attempt.module)
end
defp build_feature(spec, _context, _), do: spec
defp put_file({files, behaviours, config}, src, dest) do
{[{src, dest} | files], behaviours, config}
end
defp put_behaviour({files, behaviours, config}, behaviour) do
{files, [behaviour | behaviours], config}
end
defp put_config({files, behaviours, config}, key, value) do
{files, behaviours, [{key, value} | config]}
end
defp behaviour_order(Authority.Authentication), do: 0
defp behaviour_order(Authority.Tokenization), do: 1
defp behaviour_order(Authority.Recovery), do: 2
defp behaviour_order(Authority.Registration), do: 3
defp behaviour_order(Authority.Locking), do: 4
end
| 30.212291 | 89 | 0.684911 |
ff17367a3b56bf14c6b486553b3d92ed5bfc90c7 | 335 | ex | Elixir | lib/mix/tasks/app_version.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 37 | 2018-07-13T14:08:16.000Z | 2021-04-09T15:00:22.000Z | lib/mix/tasks/app_version.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 9 | 2018-07-16T15:24:39.000Z | 2021-09-01T14:21:20.000Z | lib/mix/tasks/app_version.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 3 | 2018-10-05T20:19:25.000Z | 2019-12-05T00:30:01.000Z | defmodule Mix.Tasks.AppVersion do
@moduledoc """
This is used to fetch the current application version set in `mix.exs`
from the command-line.
Useful for build scripts.
"""
use Mix.Task
# Only accessible during compile time
@version Mix.Project.config()[:version]
def run(_args) do
IO.puts(@version)
end
end
| 19.705882 | 72 | 0.704478 |
ff17455b41379f676b6d7126cabcd83da7968636 | 1,665 | exs | Elixir | mix.exs | zoten/cbl2019 | 3a71b3ea68ef8e80d8ab51dfd52403b6c7849b62 | [
"BSD-3-Clause"
] | 1 | 2019-03-22T17:10:48.000Z | 2019-03-22T17:10:48.000Z | mix.exs | zoten/cbl2019 | 3a71b3ea68ef8e80d8ab51dfd52403b6c7849b62 | [
"BSD-3-Clause"
] | null | null | null | mix.exs | zoten/cbl2019 | 3a71b3ea68ef8e80d8ab51dfd52403b6c7849b62 | [
"BSD-3-Clause"
] | null | null | null | defmodule Cbl2019.MixProject do
use Mix.Project
def project do
[
app: :cbl_2019,
version: "0.1.0",
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
erlc_options: [],
# Let's give our Erlang dependency to the compiler
erlc_paths: ["deps/minidiadict"],
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger, :diameter, :minidiadict],
mod: {Cbl2019.Application, []}
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help deps" to learn about dependencies.
defp deps do
IO.puts(Mix.env())
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
{:ex_doc, "~> 0.18", only: [:dev, :local]},
{:logger_file_backend, "~> 0.0.10"},
{:poison, "~> 3.1"},
{ :file_system, "~> 0.2", only: :test },
]
|> Enum.concat(extra_deps(Mix.env()))
end
# Sorry, added this in case I cannot access the internet and need to edit deps :)
defp extra_deps(:local) do
[
{:minidiadict, git: "/home/zoten/git_repos/cbl2019 - diameter/minidiadict", branch: "master"},
]
end
defp extra_deps(env) when env in [:dev] do
[
{:minidiadict, git: "https://github.com/zoten/minidiadict", branch: "master"},
]
end
defp extra_deps(env) when env in [:test] do
[
]
end
defp extra_deps(_env) do
[]
end
end
| 24.850746 | 100 | 0.593393 |
ff1774d7afedc0ca8b2f07df25842bf1773f3823 | 2,680 | ex | Elixir | lib/resin.ex | Frost/resin | 2d10f85bde7712c4016f6338e82aff4fd7b23514 | [
"MIT"
] | 6 | 2015-05-17T01:23:11.000Z | 2020-11-01T11:45:51.000Z | lib/resin.ex | Frost/resin | 2d10f85bde7712c4016f6338e82aff4fd7b23514 | [
"MIT"
] | null | null | null | lib/resin.ex | Frost/resin | 2d10f85bde7712c4016f6338e82aff4fd7b23514 | [
"MIT"
] | null | null | null | defmodule Resin do
@moduledoc """
Introduce a configurable delay to all requests in a plug pipeline.
Usage
use Resin
This will introduce a delay to all incoming requests. This delay defaults to
3000 ms, but can be configured by the `enterpriseyness` option, like so:
use Resin, enterpriseyness: 4711
It can also be configured with a range of integers, to allow some
variation in how long the delay will be:
use Resin, enterpriseyness: 1_000 .. 3_000
Another option is to configure it with a pattern of delays. This is
done by configuring the `enterpriseyness` option with an array.
use Resin, enterpriseyness: [ 0, 0, 0, 1_000 ]
Resin will cycle through this array as it processes requests, so the
result of the above example is that every 4th request will have an
added delay of 1000 ms.
This can also be done with an array of ranges, or an array that
combines integers and ranges, like so:
use Resin, enterpriseyness: [ 0, 100 .. 200, 1_000, 300 .. 400 ]
When running with `MIX_ENV=prod`, Resin will do nothing, but instead just edit
itself out of your AST. See docs on `Resin.__using__/1` for more info on that.
"""
@behaviour Plug
@default_options [enterpriseyness: 3_000]
def init(options \\ []) do
forecast_level =
@default_options
|> Keyword.merge(options)
|> Keyword.get(:enterpriseyness)
|> List.wrap
{:ok, agent} = PerformanceForecast.init(forecast_level)
[forecast: agent]
end
def call(conn, options) do
agent = Keyword.get(options, :forecast)
agent
|> PerformanceForecast.pop()
|> enterpriseyness()
|> :timer.sleep()
conn
end
defp enterpriseyness(min .. max) when max < min,
do: enterpriseyness(max .. min)
defp enterpriseyness(min .. max),
do: min + :rand.uniform(max - min)
defp enterpriseyness(level),
do: level
@doc """
Pour some resin in your plug pipeline, by `use`ing this module.
use Resin
Resin will insert itself into your pipeline, unless you compiled your project
with `MIX_ENV=prod`, and add a configurable delay to every request.
The default delay is set to 3000 ms.
If you want a shorter (or longer (really?)) delay, you can use the
`enterpriseyness` option to set the configured delay in milliseconds, like so:
use Resin, enterpriseyness: 4711
When compiling with `MIX_ENV=prod`, this macro will do nothing. You only want
to display the enterpriseyness during your demo, right?
"""
defmacro __using__(opts \\ []) do
unless Mix.env == :prod do
quote bind_quoted: [opts: opts] do
plug Resin, opts
end
end
end
end
| 29.130435 | 80 | 0.69291 |
ff177c65f93b5635ba6ba6891ea19beb6ecd7909 | 1,052 | ex | Elixir | web/router.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | web/router.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | web/router.ex | franknfjr/blog_lca | 0711ad6ba6ee878045905ec58a549527ffa5e0a4 | [
"MIT"
] | null | null | null | defmodule Login.Router do
use Login.Web, :router
use Coherence.Router # Add this
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
plug Coherence.Authentication.Session # Add this
end
pipeline :protected do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
plug Coherence.Authentication.Session, protected: true # Add this
plug PolicyWonk.Enforce, :current_user
end
# Add this block
scope "/" do
pipe_through :browser
coherence_routes
end
# Add this block
scope "/" do
pipe_through :protected
coherence_routes :protected
end
scope "/", Login do
pipe_through :browser
get "/", PageController, :index
# Add public routes below
end
scope "/", Login do
pipe_through :protected
# Add protected routes below
resources "/posts", PostController
end
end
| 21.04 | 70 | 0.691065 |
ff179eb4b7a65418ad451b9f503b587fed413de4 | 463 | ex | Elixir | lib/mix/tasks/gitlab_data.load_projects.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 2 | 2019-03-05T16:29:10.000Z | 2020-01-17T14:11:48.000Z | lib/mix/tasks/gitlab_data.load_projects.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 3 | 2019-03-18T20:26:48.000Z | 2020-06-25T14:31:13.000Z | lib/mix/tasks/gitlab_data.load_projects.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 1 | 2018-06-16T15:32:25.000Z | 2018-06-16T15:32:25.000Z | defmodule Mix.Tasks.GitlabData.LoadProjects do
use Mix.Task
import Mix.Ecto
@shortdoc "Upsert all projects"
def run(_) do
ensure_started(CncfDashboardApi.Repo, [])
Application.ensure_all_started(:cncf_dashboard_api)
upsert_projects_count = CncfDashboardApi.GitlabMigrations.upsert_projects()
upsert_count = CncfDashboardApi.GitlabMigrations.upsert_yml_projects()
Mix.shell.info(inspect(upsert_count) <> " records upserted")
end
end
| 33.071429 | 79 | 0.781857 |
ff17cbe3f5ffb98b9abc61f276de06d23c369023 | 73 | ex | Elixir | lib/pass.ex | elixir-extracts/passport | 2031d7b874e74746eea14ce1a83911be84ce8c2b | [
"Apache-2.0"
] | 1 | 2016-04-28T18:32:10.000Z | 2016-04-28T18:32:10.000Z | lib/pass.ex | elixir-extracts/passport | 2031d7b874e74746eea14ce1a83911be84ce8c2b | [
"Apache-2.0"
] | null | null | null | lib/pass.ex | elixir-extracts/passport | 2031d7b874e74746eea14ce1a83911be84ce8c2b | [
"Apache-2.0"
] | null | null | null | defmodule Pass do
@moduledoc File.read!("#{__DIR__}/../README.md")
end
| 18.25 | 50 | 0.684932 |
ff17d1e6e7244b1c8ea4b5c84678db544134ed6e | 14,097 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/machine_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/machine_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/machine_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.MachineTypes do
  @moduledoc """
  API calls for all endpoints tagged `MachineTypes`.
  """

  alias GoogleApi.Compute.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version stamped into every request; read from mix.exs at
  # compile time.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Retrieves an aggregated list of machine types.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      * `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
        For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
        You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
        To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
      * `:includeAllScopes` (*type:* `boolean()`) - Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
      * `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
      * `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
        You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
        Currently, only sorting by `name` or `creationTimestamp desc` is supported.
      * `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.MachineTypeAggregatedList{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_machine_types_aggregated_list(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.MachineTypeAggregatedList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_machine_types_aggregated_list(
        connection,
        project,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to where it belongs in the HTTP
    # request (all are query-string parameters here).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :includeAllScopes => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query
    }

    # Build the GET request; `{project}` in the URL template is substituted
    # with the percent-encoded project id.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/aggregated/machineTypes", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute and decode the JSON body into the model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.MachineTypeAggregatedList{}])
  end

  @doc """
  Returns the specified machine type. Gets a list of available machine types by making a list() request.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `zone` (*type:* `String.t`) - The name of the zone for this request.
  * `machine_type` (*type:* `String.t`) - Name of the machine type to return.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.MachineType{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_machine_types_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.MachineType.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_machine_types_get(
        connection,
        project,
        zone,
        machine_type,
        optional_params \\ [],
        opts \\ []
      ) do
    # Only the common query parameters apply to a single-resource GET.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/zones/{zone}/machineTypes/{machineType}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "zone" => URI.encode(zone, &URI.char_unreserved?/1),
        "machineType" => URI.encode(machine_type, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.MachineType{}])
  end

  @doc """
  Retrieves a list of machine types available to the specified project.

  ## Parameters

  * `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  * `project` (*type:* `String.t`) - Project ID for this request.
  * `zone` (*type:* `String.t`) - The name of the zone for this request.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      * `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
        For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
        You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
        To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
      * `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
      * `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
        You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
        Currently, only sorting by `name` or `creationTimestamp desc` is supported.
      * `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
  * `opts` (*type:* `keyword()`) - Call options

  ## Returns

  * `{:ok, %GoogleApi.Compute.V1.Model.MachineTypeList{}}` on success
  * `{:error, info}` on failure
  """
  @spec compute_machine_types_list(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.MachineTypeList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_machine_types_list(connection, project, zone, optional_params \\ [], opts \\ []) do
    # Same as the aggregated list, minus :includeAllScopes (zone is explicit
    # in the URL here).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/zones/{zone}/machineTypes", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "zone" => URI.encode(zone, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.MachineTypeList{}])
  end
end
| 55.282353 | 511 | 0.664113 |
ff182e823cd765f9a50b9e0568b469ab5999513d | 144 | ex | Elixir | web/controllers/top_controller.ex | Atla0903/nano_planner | 6ffc83e09853084e932a063a9ce651563b8b38e1 | [
"MIT"
] | null | null | null | web/controllers/top_controller.ex | Atla0903/nano_planner | 6ffc83e09853084e932a063a9ce651563b8b38e1 | [
"MIT"
] | null | null | null | web/controllers/top_controller.ex | Atla0903/nano_planner | 6ffc83e09853084e932a063a9ce651563b8b38e1 | [
"MIT"
] | null | null | null | defmodule NanoPlanner.TopController do
use NanoPlanner.Web, :controller
def index(conn, _params) do
render conn, "index.html"
end
end | 20.571429 | 38 | 0.75 |
ff18c7a3a69e996eca52b78374c3c0746d862098 | 2,238 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_entities_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2019-01-03T22:30:36.000Z | 2019-01-03T22:30:36.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_entities_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_batch_delete_entities_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteEntitiesRequest do
  @moduledoc """
  The request message for EntityTypes.BatchDeleteEntities.

  ## Attributes

  - entityValues ([String.t]): Required. The canonical `values` of the entities to delete. Note that these are not fully-qualified names, i.e. they don't start with `projects/<Project ID>`. Defaults to: `null`.
  - languageCode (String.t): Optional. The language of entity synonyms defined in `entities`. If not specified, the agent's default language is used. [More than a dozen languages](https://dialogflow.com/docs/reference/language) are supported. Note: languages must be enabled in the agent, before they can be used. Defaults to: `null`.
  """

  # Generated model: ModelBase supplies the struct definition plus the
  # JSON (de)serialization machinery driven by the `field/2` macro below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :entityValues => list(any()),
          :languageCode => any()
        }

  field(:entityValues, type: :list)
  field(:languageCode)
end
# Delegates Poison decoding to the generated model's own decode/2.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteEntitiesRequest do
  def decode(value, options) do
    GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteEntitiesRequest.decode(
      value,
      options
    )
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2BatchDeleteEntitiesRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.964286 | 348 | 0.751117 |
ff18c84171272f52926405d1c8c56f9e6696dae0 | 3,005 | ex | Elixir | lib/engine_adapters/mamdani.ex | valiot/flex | d092e1b8b0f38eeaed55ae29452d0b7d7f8965e4 | [
"MIT"
] | 7 | 2019-05-23T22:07:20.000Z | 2021-12-14T21:23:20.000Z | lib/engine_adapters/mamdani.ex | valiot/flex | d092e1b8b0f38eeaed55ae29452d0b7d7f8965e4 | [
"MIT"
] | null | null | null | lib/engine_adapters/mamdani.ex | valiot/flex | d092e1b8b0f38eeaed55ae29452d0b7d7f8965e4 | [
"MIT"
] | null | null | null | defmodule Flex.EngineAdapter.Mamdani do
@moduledoc """
Mamdani fuzzy inference was first introduced as a method to create a control system by synthesizing a set of linguistic control rules obtained from experienced human operators.
In a Mamdani system, the output of each rule is a fuzzy set. Since Mamdani systems have more intuitive and easier to understand rule bases,
they are well-suited to expert system applications where the rules are created from human expert knowledge, such as medical diagnostics.
"""
alias Flex.{EngineAdapter, EngineAdapter.State, Variable}
@behaviour EngineAdapter
import Flex.Rule, only: [statement: 2, get_rule_parameters: 3]
@impl EngineAdapter
def validation(engine_state, _antecedent, _rules, _consequent),
do: engine_state
  @impl EngineAdapter
  # Maps the crisp input vector onto the antecedent variables' membership
  # functions using the adapter's default fuzzification, and caches the
  # result on the engine state for the inference step.
  def fuzzification(%State{input_vector: input_vector} = engine_state, antecedent) do
    fuzzy_antecedent = EngineAdapter.default_fuzzification(input_vector, antecedent, %{})
    %{engine_state | fuzzy_antecedent: fuzzy_antecedent}
  end
  @impl EngineAdapter
  # Runs the rule base over the fuzzified inputs, then combines the fired
  # rules' outputs per fuzzy set, storing the resulting fuzzy consequent.
  def inference(%State{fuzzy_antecedent: fuzzy_antecedent} = engine_state, rules, consequent) do
    fuzzy_consequent =
      fuzzy_antecedent
      |> inference_engine(rules, consequent)
      |> output_combination()
    %{engine_state | fuzzy_consequent: fuzzy_consequent}
  end
  @impl EngineAdapter
  # Collapses the fuzzy consequent into a single crisp number using the
  # centroid (center-of-gravity) method.
  def defuzzification(%State{fuzzy_consequent: fuzzy_consequent} = engine_state) do
    %{engine_state | crisp_output: centroid_method(fuzzy_consequent)}
  end
  # Folds the rule list over the consequent variable: each rule is evaluated
  # against the fuzzified antecedents and the (possibly updated) consequent is
  # threaded into the next rule. Rule order therefore matters.
  def inference_engine(_fuzzy_antecedent, [], consequent), do: consequent
  def inference_engine(fuzzy_antecedent, [rule | tail], consequent) do
    # Positional parameters for function-style rules: the rule's antecedent
    # variables in order, with the consequent appended last.
    rule_parameters = get_rule_parameters(rule.antecedent, fuzzy_antecedent, []) ++ [consequent]
    consequent =
      if is_function(rule.statement) do
        # Function rules are applied directly to the parameter list.
        rule.statement.(rule_parameters)
      else
        # Expression rules are evaluated by Rule.statement/2 against a map of
        # all variables, keyed by tag (consequent included under its own tag).
        args = Map.merge(fuzzy_antecedent, %{consequent.tag => consequent})
        statement(rule.statement, args)
      end
    inference_engine(fuzzy_antecedent, tail, consequent)
  end
  # For each output fuzzy set, combines the strengths of all rules that fired
  # into that set via root-sum-square, yielding one strength per set (in
  # fuzzy_sets order). Sets with no fired rules contribute 0.0.
  defp output_combination(cons_var) do
    output = Enum.map(cons_var.fuzzy_sets, fn x -> root_sum_square(cons_var.mf_values[x.tag]) end)
    %{cons_var | rule_output: output}
  end
defp root_sum_square(nil), do: 0.0
defp root_sum_square(mf_value) do
mf_value
|> Enum.map(fn x -> x * x end)
|> Enum.sum()
|> :math.sqrt()
end
@doc """
Turns an consequent fuzzy variable (output) from a fuzzy value to a crisp value (centroid method).
"""
@spec centroid_method(Flex.Variable.t()) :: float
def centroid_method(%Variable{type: type} = fuzzy_var) when type == :consequent do
fuzzy_to_crisp(fuzzy_var.fuzzy_sets, fuzzy_var.rule_output, 0, 0)
end
defp fuzzy_to_crisp([], _input, nom, den), do: nom / den
defp fuzzy_to_crisp([fs | f_tail], [input | i_tail], nom, den) do
nom = nom + fs.mf_center * input
den = den + input
fuzzy_to_crisp(f_tail, i_tail, nom, den)
end
end
| 36.204819 | 178 | 0.732446 |
ff18ec8c09ceebe5d4d117de44eda58ac31043ac | 1,487 | ex | Elixir | core/sup_tree_core/gear_config_poller.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 144 | 2018-04-27T07:24:49.000Z | 2022-03-15T05:19:37.000Z | core/sup_tree_core/gear_config_poller.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 123 | 2018-05-01T02:54:43.000Z | 2022-01-28T01:30:52.000Z | core/sup_tree_core/gear_config_poller.ex | sylph01/antikythera | 47a93f3d4c70975f7296725c9bde2ea823867436 | [
"Apache-2.0"
] | 14 | 2018-05-01T02:30:47.000Z | 2022-02-21T04:38:56.000Z | # Copyright(c) 2015-2021 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraCore.GearConfigPoller do
  @moduledoc """
  Periodically polls changes in gear configs.
  Gear configs are persisted in a shared data storage (e.g. NFS) and are cached in ETS in each erlang node.
  Note that gear configs are loaded (cached into ETS) at each step of
  - `AntikytheraCore.start/2`: all existing gear configs are loaded
  - each gear's `start/2`: the gear's gear config is loaded
  Thus this GenServer's responsibility is just to keep up with changes in the shared data storage.
  Depends on `AntikytheraCore.GearManager` (when applying changes in gear configs results in update of cowboy routing).
  """
  use GenServer
  alias AntikytheraCore.Config.Gear, as: GearConfig
  # Poll period in milliseconds (2 minutes); also used as the GenServer timeout.
  @interval 120_000
  @typep state_t :: %{last_checked_at: pos_integer}
  def start_link([]) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end
  @impl true
  def init(:ok) do
    # last_checked_at: 0 forces a full reload on the first timeout tick.
    {:ok, %{last_checked_at: 0}, @interval}
  end
  # Explicit reload requested by another process via GenServer.cast(__MODULE__, :reload).
  @impl true
  def handle_cast(:reload, state) do
    handle_gear_config_loading(state)
  end
  # Periodic tick: the GenServer timeout fires when no message arrives for @interval ms.
  @impl true
  def handle_info(:timeout, state) do
    handle_gear_config_loading(state)
  end
  # Loads all gear configs modified since the previous check; the timestamp is
  # taken before loading so changes during the load are re-checked next tick.
  defunp handle_gear_config_loading(state :: state_t) :: {:noreply, state_t, timeout} do
    checked_at = System.system_time(:second)
    GearConfig.load_all(state[:last_checked_at])
    {:noreply, %{state | last_checked_at: checked_at}, @interval}
  end
end
| 28.596154 | 119 | 0.733692 |
ff190c0341714daa0a3c6cd36b46c39fcd790c72 | 1,016 | ex | Elixir | api/knot/lib/knot.ex | lukasz-madon/knot | 4c56af2d9d73b088af9e66b87d1ce85df752ad8d | [
"MIT"
] | null | null | null | api/knot/lib/knot.ex | lukasz-madon/knot | 4c56af2d9d73b088af9e66b87d1ce85df752ad8d | [
"MIT"
] | null | null | null | api/knot/lib/knot.ex | lukasz-madon/knot | 4c56af2d9d73b088af9e66b87d1ce85df752ad8d | [
"MIT"
] | null | null | null | defmodule Knot do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
  # Application callback: boots the top-level supervision tree (repo + endpoint).
  # NOTE(review): `Supervisor.Spec` (supervisor/2, worker/2) is deprecated since
  # Elixir 1.5 — consider migrating to module-based child specs.
  def start(_type, _args) do
    import Supervisor.Spec
    # Define workers and child supervisors to be supervised
    children = [
      # Start the Ecto repository
      supervisor(Knot.Repo, []),
      # Start the endpoint when the application starts
      supervisor(Knot.Endpoint, []),
      # Start your own worker by calling: Knot.Worker.start_link(arg1, arg2, arg3)
      # worker(Knot.Worker, [arg1, arg2, arg3]),
    ]
    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Knot.Supervisor]
    Supervisor.start_link(children, opts)
  end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Knot.Endpoint.config_change(changed, removed)
:ok
end
end
| 31.75 | 82 | 0.706693 |
ff1917910a67e41984c738de36745920a553d048 | 270 | ex | Elixir | lib/algoliax/application.ex | kianmeng/algoliax | 7226c9e7a142f24522a52d390f0a828b85b3abf9 | [
"BSD-2-Clause"
] | 25 | 2020-12-18T10:05:35.000Z | 2022-02-01T13:55:11.000Z | lib/algoliax/application.ex | kianmeng/algoliax | 7226c9e7a142f24522a52d390f0a828b85b3abf9 | [
"BSD-2-Clause"
] | 12 | 2019-11-22T09:40:51.000Z | 2020-08-19T06:09:15.000Z | lib/algoliax/application.ex | kianmeng/algoliax | 7226c9e7a142f24522a52d390f0a828b85b3abf9 | [
"BSD-2-Clause"
] | 4 | 2021-02-05T15:21:44.000Z | 2022-03-02T16:12:15.000Z | defmodule Algoliax.Application do
@moduledoc false
use Application
def start(_type, _args) do
children = [
Algoliax.SettingsStore
]
opts = [strategy: :one_for_one, name: Algoliax.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 18 | 62 | 0.703704 |
ff191c9568e6282d475e338674bdde7debae1de9 | 269 | ex | Elixir | lib/livebook/runtime/evaluator/identity_formatter.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/evaluator/identity_formatter.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/evaluator/identity_formatter.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.Evaluator.IdentityFormatter do
@moduledoc false
# The default formatter leaving the output unchanged.
@behaviour Livebook.Runtime.Evaluator.Formatter
@impl true
def format_result(evaluation_response), do: evaluation_response
end
| 24.454545 | 65 | 0.814126 |
ff1931557d2c4db7b79cc15637fe473b58742ea3 | 14,712 | ex | Elixir | lib/utils/serialization_utils.ex | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
] | 19 | 2019-04-16T07:27:53.000Z | 2022-01-22T21:35:02.000Z | lib/utils/serialization_utils.ex | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
] | 131 | 2019-04-05T13:01:37.000Z | 2020-07-09T14:53:34.000Z | lib/utils/serialization_utils.ex | aeternity/aepp-sdk-elixir | a001b0eb264665623c9b05de25a71d1f13990679 | [
"0BSD"
] | 5 | 2019-04-11T19:21:42.000Z | 2022-03-06T09:08:34.000Z | defmodule AeppSDK.Utils.SerializationUtils do
@moduledoc false
alias AeppSDK.Utils.{Encoding, Keys}
alias AeternityNode.Model.{
ChannelCloseMutualTx,
ChannelCloseSoloTx,
ChannelCreateTx,
ChannelDepositTx,
ChannelForceProgressTx,
ChannelSettleTx,
ChannelSlashTx,
ChannelSnapshotSoloTx,
ChannelWithdrawTx,
ContractCallTx,
ContractCreateTx,
NameClaimTx,
NamePreclaimTx,
NameRevokeTx,
NameTransferTx,
NameUpdateTx,
OracleExtendTx,
OracleQueryTx,
OracleRegisterTx,
OracleRespondTx,
RelativeTtl,
SpendTx,
Ttl
}
  # Serializes node-API transaction structs into `{:ok, field_list, tag}`
  # tuples for the RLP serializer. Field order within each list is significant.
  @spec process_tx_fields(struct()) :: tuple()
  # SpendTx -> :spend_tx
  def process_tx_fields(%SpendTx{
        recipient_id: tx_recipient_id,
        amount: amount,
        fee: fee,
        ttl: ttl,
        sender_id: tx_sender_id,
        nonce: nonce,
        payload: payload
      }) do
    # Encoded id strings become {:id, type, binary} records.
    sender_id = proccess_id_to_record(tx_sender_id)
    recipient_id = proccess_id_to_record(tx_recipient_id)
    {:ok,
     [
       sender_id,
       recipient_id,
       amount,
       fee,
       ttl,
       nonce,
       payload
     ], :spend_tx}
  end
  # OracleRegisterTx -> :oracle_register_tx. The oracle TTL type is encoded as
  # an integer flag: 1 = absolute, 0 = relative.
  def process_tx_fields(%OracleRegisterTx{
        query_format: query_format,
        response_format: response_format,
        query_fee: query_fee,
        oracle_ttl: %Ttl{type: type, value: value},
        account_id: tx_account_id,
        nonce: nonce,
        fee: fee,
        ttl: ttl,
        abi_version: abi_version
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    ttl_type =
      case type do
        :absolute -> 1
        :relative -> 0
      end
    {:ok,
     [
       account_id,
       nonce,
       query_format,
       response_format,
       query_fee,
       ttl_type,
       value,
       fee,
       ttl,
       abi_version
     ], :oracle_register_tx}
  end
  # OracleRespondTx -> :oracle_response_tx. The response TTL is always
  # relative, hence the hard-coded 0 flag before its value.
  def process_tx_fields(%OracleRespondTx{
        query_id: query_id,
        response: response,
        response_ttl: %RelativeTtl{type: _type, value: value},
        fee: fee,
        ttl: ttl,
        oracle_id: tx_oracle_id,
        nonce: nonce
      }) do
    oracle_id = proccess_id_to_record(tx_oracle_id)
    # Query ids arrive base58c-encoded with a prefix; strip/decode to binary.
    binary_query_id = Encoding.prefix_decode_base58c(query_id)
    {:ok,
     [
       oracle_id,
       nonce,
       binary_query_id,
       response,
       0,
       value,
       fee,
       ttl
     ], :oracle_response_tx}
  end
  # OracleQueryTx -> :oracle_query_tx. Query TTL may be absolute (1) or
  # relative (0); the response TTL is always relative (hard-coded 0).
  def process_tx_fields(%OracleQueryTx{
        oracle_id: tx_oracle_id,
        query: query,
        query_fee: query_fee,
        query_ttl: %Ttl{type: query_type, value: query_value},
        response_ttl: %RelativeTtl{type: _response_type, value: response_value},
        fee: fee,
        ttl: ttl,
        sender_id: tx_sender_id,
        nonce: nonce
      }) do
    sender_id = proccess_id_to_record(tx_sender_id)
    oracle_id = proccess_id_to_record(tx_oracle_id)
    query_ttl_type =
      case query_type do
        :absolute -> 1
        :relative -> 0
      end
    {:ok,
     [
       sender_id,
       nonce,
       oracle_id,
       query,
       query_fee,
       query_ttl_type,
       query_value,
       0,
       response_value,
       fee,
       ttl
     ], :oracle_query_tx}
  end
  # OracleExtendTx -> :oracle_extend_tx. Extension TTLs are always relative
  # (hard-coded 0 flag).
  def process_tx_fields(%OracleExtendTx{
        fee: fee,
        oracle_ttl: %RelativeTtl{type: _type, value: value},
        oracle_id: tx_oracle_id,
        nonce: nonce,
        ttl: ttl
      }) do
    oracle_id = proccess_id_to_record(tx_oracle_id)
    {:ok,
     [
       oracle_id,
       nonce,
       0,
       value,
       fee,
       ttl
     ], :oracle_extend_tx}
  end
  # NameClaimTx -> :name_claim_tx.
  def process_tx_fields(%NameClaimTx{
        name: name,
        name_salt: name_salt,
        name_fee: name_fee,
        fee: fee,
        ttl: ttl,
        account_id: tx_account_id,
        nonce: nonce
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    {:ok,
     [
       account_id,
       nonce,
       name,
       name_salt,
       name_fee,
       fee,
       ttl
     ], :name_claim_tx}
  end
  # NamePreclaimTx -> :name_preclaim_tx.
  def process_tx_fields(%NamePreclaimTx{
        commitment_id: tx_commitment_id,
        fee: fee,
        ttl: ttl,
        account_id: tx_account_id,
        nonce: nonce
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    commitment_id = proccess_id_to_record(tx_commitment_id)
    {:ok,
     [
       account_id,
       nonce,
       commitment_id,
       fee,
       ttl
     ], :name_preclaim_tx}
  end
  # NameUpdateTx -> :name_update_tx. Pointers are passed through unchanged.
  def process_tx_fields(%NameUpdateTx{
        name_id: tx_name_id,
        name_ttl: name_ttl,
        pointers: pointers,
        client_ttl: client_ttl,
        fee: fee,
        ttl: ttl,
        account_id: tx_account_id,
        nonce: nonce
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    name_id = proccess_id_to_record(tx_name_id)
    {:ok,
     [
       account_id,
       nonce,
       name_id,
       name_ttl,
       pointers,
       client_ttl,
       fee,
       ttl
     ], :name_update_tx}
  end
  # NameRevokeTx -> :name_revoke_tx.
  def process_tx_fields(%NameRevokeTx{
        name_id: tx_name_id,
        fee: fee,
        ttl: ttl,
        account_id: tx_account_id,
        nonce: nonce
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    name_id = proccess_id_to_record(tx_name_id)
    {:ok,
     [
       account_id,
       nonce,
       name_id,
       fee,
       ttl
     ], :name_revoke_tx}
  end
  # NameTransferTx -> :name_transfer_tx.
  def process_tx_fields(%NameTransferTx{
        name_id: tx_name_id,
        recipient_id: tx_recipient_id,
        fee: fee,
        ttl: ttl,
        account_id: tx_account_id,
        nonce: nonce
      }) do
    account_id = proccess_id_to_record(tx_account_id)
    name_id = proccess_id_to_record(tx_name_id)
    recipient_id = proccess_id_to_record(tx_recipient_id)
    {:ok,
     [
       account_id,
       nonce,
       name_id,
       recipient_id,
       fee,
       ttl
     ], :name_transfer_tx}
  end
  # ContractCreateTx -> :contract_create_tx. The API's abi_version is bound as
  # ct_version here and serialized in that position.
  def process_tx_fields(%ContractCreateTx{
        owner_id: tx_owner_id,
        nonce: nonce,
        code: code,
        abi_version: ct_version,
        deposit: deposit,
        amount: amount,
        gas: gas,
        gas_price: gas_price,
        fee: fee,
        ttl: ttl,
        call_data: call_data
      }) do
    owner_id = proccess_id_to_record(tx_owner_id)
    {:ok,
     [
       owner_id,
       nonce,
       code,
       ct_version,
       fee,
       ttl,
       deposit,
       amount,
       gas,
       gas_price,
       call_data
     ], :contract_create_tx}
  end
  # ContractCallTx -> :contract_call_tx.
  def process_tx_fields(%ContractCallTx{
        caller_id: tx_caller_id,
        nonce: nonce,
        contract_id: tx_contract_id,
        abi_version: abi_version,
        fee: fee,
        ttl: ttl,
        amount: amount,
        gas: gas,
        gas_price: gas_price,
        call_data: call_data
      }) do
    caller_id = proccess_id_to_record(tx_caller_id)
    contract_id = proccess_id_to_record(tx_contract_id)
    {:ok,
     [
       caller_id,
       nonce,
       contract_id,
       abi_version,
       fee,
       ttl,
       amount,
       gas,
       gas_price,
       call_data
     ], :contract_call_tx}
  end
  # ChannelCreateTx -> :channel_create_tx. The "st_"-prefixed state hash is
  # pattern-matched off and the remainder base58c-decoded; each delegate id is
  # converted to an id record.
  def process_tx_fields(%ChannelCreateTx{
        initiator_id: initiator,
        initiator_amount: initiator_amount,
        responder_id: responder,
        responder_amount: responder_amount,
        channel_reserve: channel_reserve,
        lock_period: lock_period,
        ttl: ttl,
        fee: fee,
        delegate_ids: delegate_ids,
        state_hash: <<"st_", state_hash::binary>>,
        nonce: nonce
      }) do
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    initiator_id = proccess_id_to_record(initiator)
    responder_id = proccess_id_to_record(responder)
    list_delegate_ids =
      for id <- delegate_ids do
        proccess_id_to_record(id)
      end
    {:ok,
     [
       initiator_id,
       initiator_amount,
       responder_id,
       responder_amount,
       channel_reserve,
       lock_period,
       ttl,
       fee,
       list_delegate_ids,
       decoded_state_hash,
       nonce
     ], :channel_create_tx}
  end
  # ChannelCloseMutualTx -> :channel_close_mutual_tx.
  def process_tx_fields(%ChannelCloseMutualTx{
        channel_id: channel,
        fee: fee,
        from_id: from,
        initiator_amount_final: initiator_amount_final,
        nonce: nonce,
        responder_amount_final: responder_amount_final,
        ttl: ttl
      }) do
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok,
     [
       channel_id,
       from_id,
       initiator_amount_final,
       responder_amount_final,
       ttl,
       fee,
       nonce
     ], :channel_close_mutual_tx}
  end
  # ChannelCloseSoloTx -> :channel_close_solo_tx. Payload and proof of
  # inclusion (poi) are passed through as-is.
  def process_tx_fields(%ChannelCloseSoloTx{
        channel_id: channel,
        fee: fee,
        from_id: from,
        nonce: nonce,
        payload: payload,
        poi: poi,
        ttl: ttl
      }) do
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok,
     [
       channel_id,
       from_id,
       payload,
       poi,
       ttl,
       fee,
       nonce
     ], :channel_close_solo_tx}
  end
  # ChannelDepositTx -> :channel_deposit_tx. Decodes the "st_" state hash.
  def process_tx_fields(%ChannelDepositTx{
        amount: amount,
        channel_id: channel,
        fee: fee,
        from_id: from,
        nonce: nonce,
        round: round,
        state_hash: <<"st_", state_hash::binary>>,
        ttl: ttl
      }) do
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok,
     [
       channel_id,
       from_id,
       amount,
       ttl,
       fee,
       decoded_state_hash,
       round,
       nonce
     ], :channel_deposit_tx}
  end
  # ChannelForceProgressTx -> :channel_force_progress_tx. Decodes the "st_"
  # state hash; payload, update and offchain_trees pass through unchanged.
  def process_tx_fields(%ChannelForceProgressTx{
        channel_id: channel,
        fee: fee,
        from_id: from,
        nonce: nonce,
        offchain_trees: offchain_trees,
        payload: payload,
        round: round,
        state_hash: <<"st_", state_hash::binary>>,
        ttl: ttl,
        update: update
      }) do
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok,
     [
       channel_id,
       from_id,
       payload,
       round,
       update,
       decoded_state_hash,
       offchain_trees,
       ttl,
       fee,
       nonce
     ], :channel_force_progress_tx}
  end
  # ChannelSettleTx -> :channel_settle_tx.
  def process_tx_fields(%ChannelSettleTx{
        channel_id: channel,
        fee: fee,
        from_id: from,
        initiator_amount_final: initiator_amount_final,
        nonce: nonce,
        responder_amount_final: responder_amount_final,
        ttl: ttl
      }) do
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok, [channel_id, from_id, initiator_amount_final, responder_amount_final, ttl, fee, nonce],
     :channel_settle_tx}
  end
  # ChannelSlashTx -> :channel_slash_tx.
  def process_tx_fields(%ChannelSlashTx{
        channel_id: channel,
        fee: fee,
        from_id: from,
        nonce: nonce,
        payload: payload,
        poi: poi,
        ttl: ttl
      }) do
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok,
     [
       channel_id,
       from_id,
       payload,
       poi,
       ttl,
       fee,
       nonce
     ], :channel_slash_tx}
  end
  # ChannelSnapshotSoloTx -> :channel_snapshot_solo_tx.
  def process_tx_fields(%ChannelSnapshotSoloTx{
        channel_id: channel,
        from_id: from,
        payload: payload,
        ttl: ttl,
        fee: fee,
        nonce: nonce
      }) do
    channel_id = proccess_id_to_record(channel)
    from_id = proccess_id_to_record(from)
    {:ok, [channel_id, from_id, payload, ttl, fee, nonce], :channel_snapshot_solo_tx}
  end
  # ChannelWithdrawTx -> :channel_withdraw_tx. Decodes the "st_" state hash.
  def process_tx_fields(%ChannelWithdrawTx{
        channel_id: channel,
        to_id: to,
        amount: amount,
        ttl: ttl,
        fee: fee,
        nonce: nonce,
        state_hash: <<"st_", state_hash::binary>>,
        round: round
      }) do
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    channel_id = proccess_id_to_record(channel)
    to_id = proccess_id_to_record(to)
    {:ok, [channel_id, to_id, amount, ttl, fee, decoded_state_hash, round, nonce],
     :channel_withdraw_tx}
  end
  # Generalized-account attach -> :ga_attach_tx. NOTE: these bare-map clauses
  # must remain BELOW the struct clauses above — structs are maps, so a struct
  # value would otherwise also match a plain map pattern.
  def process_tx_fields(%{
        owner_id: owner_id,
        nonce: nonce,
        code: code,
        auth_fun: auth_fun,
        ct_version: ct_version,
        fee: fee,
        ttl: ttl,
        gas: gas,
        gas_price: gas_price,
        call_data: call_data
      }) do
    owner_id_record = proccess_id_to_record(owner_id)
    {:ok,
     [
       owner_id_record,
       nonce,
       code,
       auth_fun,
       ct_version,
       fee,
       ttl,
       gas,
       gas_price,
       call_data
     ], :ga_attach_tx}
  end
  # Generalized-account meta -> :ga_meta_tx. The wrapped inner tx passes
  # through unchanged.
  def process_tx_fields(%{
        ga_id: ga_id,
        auth_data: auth_data,
        abi_version: abi_version,
        fee: fee,
        gas: gas,
        gas_price: gas_price,
        ttl: ttl,
        tx: tx
      }) do
    ga_id_record = proccess_id_to_record(ga_id)
    {:ok,
     [
       ga_id_record,
       auth_data,
       abi_version,
       fee,
       gas,
       gas_price,
       ttl,
       tx
     ], :ga_meta_tx}
  end
  # Off-chain channel state, version 1 (with updates) -> :channel_offchain_tx.
  def process_tx_fields(%{
        channel_id: channel_id,
        round: round,
        state_hash: <<"st_", state_hash::binary>>,
        version: 1,
        updates: updates
      }) do
    channel_id_record = proccess_id_to_record(channel_id)
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    {:ok,
     [
       channel_id_record,
       round,
       updates,
       decoded_state_hash
     ], :channel_offchain_tx}
  end
  # Off-chain channel state, version 2 (no updates) -> :channel_offchain_tx_no_updates.
  def process_tx_fields(%{
        channel_id: channel_id,
        round: round,
        state_hash: <<"st_", state_hash::binary>>,
        version: 2
      }) do
    channel_id_record = proccess_id_to_record(channel_id)
    decoded_state_hash = Encoding.decode_base58c(state_hash)
    {:ok,
     [
       channel_id_record,
       round,
       decoded_state_hash
     ], :channel_offchain_tx_no_updates}
  end
  # Catch-all: anything that matched no clause above is rejected with a
  # descriptive error instead of raising FunctionClauseError.
  def process_tx_fields(tx) do
    {:error, "Unknown or invalid tx: #{inspect(tx)}"}
  end
def ttl_type_for_client(type) do
case type do
0 ->
:relative
1 ->
:absolute
end
end
  # Converts a prefixed, base58c-encoded id string (e.g. "ak_...") into the
  # {:id, type_atom, binary} record shape the serializer expects.
  # NOTE(review): the name is misspelled ("proccess" -> "process"); renaming
  # would touch every clause above, so it is deliberately left as-is here.
  defp proccess_id_to_record(id) when is_binary(id) do
    {type, binary_data} =
      id
      |> Keys.public_key_to_binary(:with_prefix)
    # Map the three-character prefix to its object type; an unknown prefix
    # raises CaseClauseError.
    id =
      case type do
        "ak_" -> :account
        "ok_" -> :oracle
        "ct_" -> :contract
        "nm_" -> :name
        "cm_" -> :commitment
        "ch_" -> :channel
      end
    {:id, id, binary_data}
  end
end
| 21.540264 | 97 | 0.582722 |
ff19517055642c79fdd075d29829091c3b5a615f | 102 | exs | Elixir | test/cheerland_reservas_web/views/layout_view_test.exs | fskinner/cheerland-reservas | 48a782d7e49736a89b4fd2136c98906269b6351e | [
"MIT"
] | 2 | 2019-03-14T20:03:44.000Z | 2019-08-30T13:30:42.000Z | test/cheerland_reservas_web/views/layout_view_test.exs | fskinner/cheerland-reservas | 48a782d7e49736a89b4fd2136c98906269b6351e | [
"MIT"
] | 1 | 2021-05-08T08:23:18.000Z | 2021-05-08T08:23:18.000Z | test/cheerland_reservas_web/views/layout_view_test.exs | fskinner/cheerland-reservas | 48a782d7e49736a89b4fd2136c98906269b6351e | [
"MIT"
] | null | null | null | defmodule CheerlandReservasWeb.LayoutViewTest do
use CheerlandReservasWeb.ConnCase, async: true
end
| 25.5 | 48 | 0.862745 |
ff1997e97757c5d2bee811f70cc543872fb053ee | 2,185 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/ios_device.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/testing/lib/google_api/testing/v1/model/ios_device.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/testing/lib/google_api/testing/v1/model/ios_device.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Testing.V1.Model.IosDevice do
  @moduledoc """
  A single iOS device.
  ## Attributes
  * `iosModelId` (*type:* `String.t`, *default:* `nil`) - Required. The id of the iOS device to be used. Use the TestEnvironmentDiscoveryService to get supported options.
  * `iosVersionId` (*type:* `String.t`, *default:* `nil`) - Required. The id of the iOS major software version to be used. Use the TestEnvironmentDiscoveryService to get supported options.
  * `locale` (*type:* `String.t`, *default:* `nil`) - Required. The locale the test device used for testing. Use the TestEnvironmentDiscoveryService to get supported options.
  * `orientation` (*type:* `String.t`, *default:* `nil`) - Required. How the device is oriented during the test. Use the TestEnvironmentDiscoveryService to get supported options.
  """
  # Auto-generated model (see file header) — regenerate rather than hand-edit.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :iosModelId => String.t() | nil,
          :iosVersionId => String.t() | nil,
          :locale => String.t() | nil,
          :orientation => String.t() | nil
        }
  # Field declarations drive JSON (de)serialization in ModelBase.
  field(:iosModelId)
  field(:iosVersionId)
  field(:locale)
  field(:orientation)
end
end
# Delegates Poison decoding to the generated model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.IosDevice do
  def decode(value, options) do
    GoogleApi.Testing.V1.Model.IosDevice.decode(value, options)
  end
end
# Delegates Poison encoding to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.IosDevice do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.017857 | 190 | 0.716705 |
ff1a8218dd566639c4268df0ce0ef10e1b3f2225 | 619 | exs | Elixir | avatars/test/avatars_web/views/error_view_test.exs | derailed/ex_ray_tracers | a687baedde8c472ce426afba02902a336a403a7f | [
"Apache-2.0"
] | 9 | 2017-10-29T17:35:14.000Z | 2020-04-19T14:06:50.000Z | avatars/test/avatars_web/views/error_view_test.exs | derailed/ex_ray_tracers | a687baedde8c472ce426afba02902a336a403a7f | [
"Apache-2.0"
] | null | null | null | avatars/test/avatars_web/views/error_view_test.exs | derailed/ex_ray_tracers | a687baedde8c472ce426afba02902a336a403a7f | [
"Apache-2.0"
] | null | null | null | defmodule AvatarsWeb.ErrorViewTest do
use AvatarsWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(AvatarsWeb.ErrorView, "404.json", []) ==
%{errors: %{detail: "Page not found"}}
end
test "render 500.json" do
assert render(AvatarsWeb.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
test "render any other" do
assert render(AvatarsWeb.ErrorView, "505.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
end
| 28.136364 | 66 | 0.654281 |
ff1a97a9792d2d69b90e6be55fd1e8aa72340c0c | 873 | ex | Elixir | elixir/lib/homework/transactions/transaction.ex | MitchellJeppson/DivvyPayHQ-web-homework | 01dc6a7ee0173c92633d93f7ba7e9162b1189985 | [
"MIT"
] | null | null | null | elixir/lib/homework/transactions/transaction.ex | MitchellJeppson/DivvyPayHQ-web-homework | 01dc6a7ee0173c92633d93f7ba7e9162b1189985 | [
"MIT"
] | null | null | null | elixir/lib/homework/transactions/transaction.ex | MitchellJeppson/DivvyPayHQ-web-homework | 01dc6a7ee0173c92633d93f7ba7e9162b1189985 | [
"MIT"
] | 1 | 2021-03-30T07:41:56.000Z | 2021-03-30T07:41:56.000Z | defmodule Homework.Transactions.Transaction do
use Ecto.Schema
import Ecto.Changeset
alias Homework.Merchants.Merchant
alias Homework.Users.User
  # UUID primary key, generated by the database layer.
  @primary_key {:id, :binary_id, autogenerate: true}
  schema "transactions" do
    # amount is an integer — presumably minor currency units (cents); confirm
    # with callers before changing.
    field(:amount, :integer)
    field(:credit, :boolean, default: false)
    field(:debit, :boolean, default: false)
    field(:description, :string)
    field(:company_id, :string)
    belongs_to(:merchant, Merchant, type: :binary_id, foreign_key: :merchant_id)
    belongs_to(:user, User, type: :binary_id, foreign_key: :user_id)
    timestamps()
  end
@doc false
def changeset(transaction, attrs) do
transaction
|> cast(attrs, [:user_id, :amount, :debit, :credit, :description, :merchant_id, :company_id])
|> validate_required([:user_id, :amount, :debit, :credit, :description, :merchant_id, :company_id])
end
end
| 31.178571 | 103 | 0.710195 |
ff1aa032e281508e6bb14019549d2dbe8a95aa53 | 537 | ex | Elixir | rocketpay/lib/rocketpay/accounts/withdraw.ex | Samuel-Ricardo/RocketPay | aeec9d00fd04de8b1541f5369dcdcb2cb5e03807 | [
"MIT"
] | 3 | 2021-02-24T02:55:37.000Z | 2021-07-14T18:55:40.000Z | rocketpay/lib/rocketpay/accounts/withdraw.ex | Samuel-Ricardo/RocketPay | aeec9d00fd04de8b1541f5369dcdcb2cb5e03807 | [
"MIT"
] | null | null | null | rocketpay/lib/rocketpay/accounts/withdraw.ex | Samuel-Ricardo/RocketPay | aeec9d00fd04de8b1541f5369dcdcb2cb5e03807 | [
"MIT"
] | 1 | 2021-03-13T01:32:40.000Z | 2021-03-13T01:32:40.000Z | defmodule Rocketpay.Accounts.Withdraw do
alias Ecto.Multi
alias Rocketpay.Accounts.Operation
alias Rocketpay.Repo
  # Builds the withdraw Ecto.Multi via Operation.call/2 and executes it,
  # returning {:ok, account} or {:error, reason}.
  def call(params) do
    params
    |> Operation.call(:withdraw)
    |> run_transaction()
  end
# params = %{"id" => "37197d60-b600-4e46-be41-64d14d5cd052", "value" => "50.0"} |> Rocketpay.deposit()
  # Executes the multi; on failure the failing operation and change set are
  # discarded and only the reason is surfaced, on success the :withdraw step's
  # account is returned.
  defp run_transaction(multi) do
    case Repo.transaction(multi) do
      {:error, _operation, reason, _changes} -> {:error, reason}
      {:ok, %{withdraw: account}} -> {:ok, account}
    end
  end
end
| 22.375 | 105 | 0.651769 |
ff1abd8d99b5720ce5e237859594ddaae5ef43a5 | 723 | ex | Elixir | lib/tw/http/response.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | lib/tw/http/response.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | lib/tw/http/response.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | defmodule Tw.HTTP.Response do
@moduledoc """
HTTP response data structure.
"""
defstruct [:status, :headers, :body]
@type t :: %__MODULE__{
status: non_neg_integer(),
headers: list({binary, binary}),
body: nil | binary
}
@spec new(keyword) :: t
@doc false
def new(opts) do
opts = opts |> Keyword.update!(:headers, &Enum.map(&1, fn {k, v} -> {String.downcase(k), v} end))
struct!(__MODULE__, opts)
end
@spec get_header(t, binary) :: list(binary())
@doc false
def get_header(%__MODULE__{} = response, key) do
response.headers
|> Enum.reduce([], fn
{^key, value}, a -> [value | a]
_, a -> a
end)
|> Enum.reverse()
end
end
| 22.59375 | 101 | 0.572614 |
ff1ac1ec60a3b2cc0cd3494594027ea20016f021 | 909 | ex | Elixir | lib/tosh/application.ex | korczis/tosh | dc197f223b460e5769ceaa47d8058202aeab66b8 | [
"MIT"
] | null | null | null | lib/tosh/application.ex | korczis/tosh | dc197f223b460e5769ceaa47d8058202aeab66b8 | [
"MIT"
] | null | null | null | lib/tosh/application.ex | korczis/tosh | dc197f223b460e5769ceaa47d8058202aeab66b8 | [
"MIT"
] | null | null | null | defmodule Tosh.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
  # Application callback: boots the supervision tree (repo + endpoint) using
  # modern module child specs.
  def start(_type, _args) do
    # List all child processes to be supervised
    children = [
      # Start the Ecto repository
      Tosh.Repo,
      # Start the endpoint when the application starts
      ToshWeb.Endpoint
      # Starts a worker by calling: Tosh.Worker.start_link(arg)
      # {Tosh.Worker, arg},
    ]
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Tosh.Supervisor]
    Supervisor.start_link(children, opts)
  end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
ToshWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 28.40625 | 63 | 0.707371 |
ff1b68df4b98947d0b5adedaeb6c4a762fe7f3aa | 1,083 | ex | Elixir | lib/cadet/accounts/game_states.ex | Source-Academy-Game/cadet | a46af782fec8866b86a232a0bc27f4cec0b2269c | [
"Apache-2.0"
] | null | null | null | lib/cadet/accounts/game_states.ex | Source-Academy-Game/cadet | a46af782fec8866b86a232a0bc27f4cec0b2269c | [
"Apache-2.0"
] | 2 | 2020-04-22T09:06:38.000Z | 2020-04-22T09:15:33.000Z | lib/cadet/accounts/game_states.ex | Source-Academy-Game/cadet | a46af782fec8866b86a232a0bc27f4cec0b2269c | [
"Apache-2.0"
] | 1 | 2020-06-01T03:26:02.000Z | 2020-06-01T03:26:02.000Z | defmodule Cadet.Accounts.GameStates do
use Cadet, :context
alias Cadet.Accounts.User
@update_gamestate_roles ~w(student)a
  # NOTE: no error-handling helpers have been implemented in this module yet.
def user_game_states(user) do
user.game_states
end
def user_collectibles(user) do
user.game_states["collectibles"]
end
def user_save_data(user) do
user.game_states["completed_quests"]
end
  # Replaces the user's game_states with new_game_states, but only for roles in
  # @update_gamestate_roles (students). Returns {:ok, nil} on success; other
  # roles get a :forbidden error. Repo.update!/1 raises on invalid changesets.
  def update(user = %User{role: role}, new_game_states) do
    if role in @update_gamestate_roles do
      changeset = cast(user, %{game_states: new_game_states}, [:game_states])
      Repo.update!(changeset)
      {:ok, nil}
    else
      {:error, {:forbidden, "Please try again later."}}
    end
  end
# Resets the user's game state back to its pristine shape: no collectibles
# and no completed quests.
#
# Same authorization rule as `update/2`: only roles listed in
# @update_gamestate_roles may clear; anyone else gets a :forbidden tuple.
def clear(user = %User{role: role}) when role in @update_gamestate_roles do
  pristine = %{collectibles: %{}, completed_quests: []}

  user
  |> cast(%{game_states: pristine}, [:game_states])
  |> Repo.update!()

  {:ok, nil}
end

def clear(%User{}) do
  {:error, {:forbidden, "Please try again later."}}
end
end
| 24.066667 | 80 | 0.658356 |
ff1b6abf727240177f12cabf09e4c8ff5e18feb4 | 2,308 | ex | Elixir | youtube/groxio/tetris/lib/tetris_web.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | youtube/groxio/tetris/lib/tetris_web.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | youtube/groxio/tetris/lib/tetris_web.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | defmodule TetrisWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use TetrisWeb, :controller
use TetrisWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted boilerplate for Phoenix controllers: Plug.Conn helpers, gettext
# translations, and route helpers under the `Routes` alias.
def controller do
  quote do
    use Phoenix.Controller, namespace: TetrisWeb

    import Plug.Conn
    import TetrisWeb.Gettext
    alias TetrisWeb.Router.Helpers, as: Routes
  end
end
# Quoted boilerplate for Phoenix views: template lookup rooted at the
# templates directory, a few controller conveniences, and the shared
# view helpers.
def view do
  quote do
    use Phoenix.View,
      root: "lib/tetris_web/templates",
      namespace: TetrisWeb

    # Import convenience functions from controllers
    import Phoenix.Controller,
      only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

    # Include shared imports and aliases for views
    unquote(view_helpers())
  end
end
# Quoted boilerplate for LiveViews; renders inside the "live" layout.
def live_view do
  quote do
    use Phoenix.LiveView,
      layout: {TetrisWeb.LayoutView, "live.html"}

    unquote(view_helpers())
  end
end
# Quoted boilerplate for LiveComponents.
def live_component do
  quote do
    use Phoenix.LiveComponent

    unquote(view_helpers())
  end
end
# Quoted boilerplate for the router: Plug, controller, and LiveView
# routing macros.
def router do
  quote do
    use Phoenix.Router

    import Plug.Conn
    import Phoenix.Controller
    import Phoenix.LiveView.Router
  end
end
# Quoted boilerplate for Phoenix channels.
def channel do
  quote do
    use Phoenix.Channel
    import TetrisWeb.Gettext
  end
end
# Imports and aliases shared by views, LiveViews, and LiveComponents.
defp view_helpers do
  quote do
    # Use all HTML functionality (forms, tags, etc)
    use Phoenix.HTML

    # Import LiveView helpers (live_render, live_component, live_patch, etc)
    import Phoenix.LiveView.Helpers

    # Import basic rendering functionality (render, render_layout, etc)
    import Phoenix.View

    import TetrisWeb.ErrorHelpers
    import TetrisWeb.Gettext
    alias TetrisWeb.Router.Helpers, as: Routes
  end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.407767 | 78 | 0.680243 |
ff1b70a90fb05479cd34cca08a86ba91e8e56767 | 29,715 | exs | Elixir | test/ecto/query/planner_test.exs | machineloop/ecto | 26c1cb5591a27241067435aedec48c68d102fb8d | [
"Apache-2.0"
] | 1 | 2019-05-03T08:51:16.000Z | 2019-05-03T08:51:16.000Z | test/ecto/query/planner_test.exs | machineloop/ecto | 26c1cb5591a27241067435aedec48c68d102fb8d | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | machineloop/ecto | 26c1cb5591a27241067435aedec48c68d102fb8d | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../../integration_test/support/types.exs", __DIR__
defmodule Ecto.Query.PlannerTest do
use ExUnit.Case, async: true
import Ecto.Query
alias Ecto.Query.Planner
alias Ecto.Query.JoinExpr
# Test schema exercising virtual fields, binary ids, filtered `belongs_to`
# associations, and a has_many :through.
defmodule Comment do
  use Ecto.Schema

  schema "comments" do
    field :text, :string
    # Virtual: never read from or written to the database.
    field :temp, :string, virtual: true
    field :posted, :naive_datetime
    field :uuid, :binary_id
    field :crazy_comment, :string

    belongs_to :post, Ecto.Query.PlannerTest.Post
    # Association carrying an extra static filter on the parent row.
    belongs_to :crazy_post, Ecto.Query.PlannerTest.Post,
      where: [title: "crazypost"]

    # Reuses the :crazy_post_id FK (define_field: false avoids redeclaring
    # it) and filters with an `{:in, list}` condition instead.
    belongs_to :crazy_post_with_list, Ecto.Query.PlannerTest.Post,
      where: [title: {:in, ["crazypost1", "crazypost2"]}],
      foreign_key: :crazy_post_id,
      define_field: false

    has_many :post_comments, through: [:post, :comments]
    has_many :comment_posts, Ecto.Query.PlannerTest.CommentPost
  end
end
# Join schema between comments and posts; also carries a filtered
# association and a boolean flag used by `inactive/0`.
defmodule CommentPost do
  use Ecto.Schema

  schema "comment_posts" do
    belongs_to :comment, Comment
    belongs_to :post, Post
    # Filtered association: only comments with no text.
    belongs_to :special_comment, Comment, where: [text: nil]
    field :deleted, :boolean
  end

  # Dynamic filter selecting rows whose :deleted flag is set.
  def inactive() do
    dynamic([row], row.deleted)
  end
end
# Test schema with a custom primary-key type, a schema prefix, a field whose
# source column differs from its name, and filtered many_to_many associations.
defmodule Post do
  use Ecto.Schema

  @primary_key {:id, Custom.Permalink, []}
  @schema_prefix "my_prefix"
  schema "posts" do
    # Stored in the :post_title column; tests must see the source name
    # in normalized field lists.
    field :title, :string, source: :post_title
    field :text, :string
    field :code, :binary
    field :posted, :naive_datetime
    field :visits, :integer
    field :links, {:array, Custom.Permalink}

    has_many :comments, Ecto.Query.PlannerTest.Comment
    has_many :extra_comments, Ecto.Query.PlannerTest.Comment
    # Filtered: only comments whose text is present.
    has_many :special_comments, Ecto.Query.PlannerTest.Comment, where: [text: {:not, nil}]

    many_to_many :crazy_comments, Comment, join_through: CommentPost, where: [text: "crazycomment"]
    many_to_many :crazy_comments_with_list, Comment, join_through: CommentPost, where: [text: {:in, ["crazycomment1", "crazycomment2"]}]
  end
end
# Plans `query` for `operation` against the test adapter, returning
# {planned_query, params, cache_key}.
defp plan(query, operation \\ :all) do
  Planner.plan(query, operation, Ecto.TestAdapter, 0)
end
# Plans and normalizes `query`, returning only the normalized query;
# the params and select info produced alongside it are discarded.
defp normalize(query, operation \\ :all) do
  {normalized, _params, _select} = normalize_with_params(query, operation)
  normalized
end
# Plans `query`, fills in a default select when the operation needs one,
# then normalizes it. Returns {normalized_query, params, select_info}.
defp normalize_with_params(query, operation \\ :all) do
  {query, params, _key} = plan(query, operation)

  {query, select} =
    query
    # Only :all queries get an implicit `select: source`.
    |> Planner.ensure_select(operation == :all)
    |> Planner.normalize(operation, Ecto.TestAdapter, 0)

  {query, params, select}
end
# Builds the `source.field` access AST nodes the planner emits in
# `select.fields` for the binding at index `ix` — one node per field.
defp select_fields(fields, ix) do
  Enum.map(fields, fn field ->
    {{:., [], [{:&, [], [ix]}, field]}, [], []}
  end)
end
test "plan: merges all parameters" do
union = from p in Post, select: {p.title, ^"union"}
subquery = from Comment, where: [text: ^"subquery"]
query =
from p in Post,
select: {p.title, ^"select"},
join: c in subquery(subquery),
on: c.text == ^"join",
left_join: d in assoc(p, :comments),
union_all: ^union,
windows: [foo: [partition_by: fragment("?", ^"windows")]],
where: p.title == ^"where",
group_by: p.title == ^"group_by",
having: p.title == ^"having",
order_by: [asc: fragment("?", ^"order_by")],
limit: ^0,
offset: ^1
{_query, params, _key} = plan(query)
assert params ==
["select", "subquery", "join", "where", "group_by", "having", "windows"] ++
["union", "order_by", 0, 1]
end
test "plan: checks from" do
assert_raise Ecto.QueryError, ~r"query must have a from expression", fn ->
plan(%Ecto.Query{})
end
end
test "plan: casts values" do
{_query, params, _key} = plan(Post |> where([p], p.id == ^"1"))
assert params == [1]
exception = assert_raise Ecto.Query.CastError, fn ->
plan(Post |> where([p], p.title == ^1))
end
assert Exception.message(exception) =~ "value `1` in `where` cannot be cast to type :string"
assert Exception.message(exception) =~ "where: p0.title == ^1"
end
test "plan: raises readable error on dynamic expressions/keyword lists" do
dynamic = dynamic([p], p.id == ^"1")
{_query, params, _key} = plan(Post |> where([p], ^dynamic))
assert params == [1]
assert_raise Ecto.QueryError, ~r/dynamic expressions can only be interpolated/, fn ->
plan(Post |> where([p], p.title == ^dynamic))
end
assert_raise Ecto.QueryError, ~r/keyword lists can only be interpolated/, fn ->
plan(Post |> where([p], p.title == ^[foo: 1]))
end
end
test "plan: casts and dumps custom types" do
permalink = "1-hello-world"
{_query, params, _key} = plan(Post |> where([p], p.id == ^permalink))
assert params == [1]
end
test "plan: casts and dumps binary ids" do
uuid = "00010203-0405-4607-8809-0a0b0c0d0e0f"
{_query, params, _key} = plan(Comment |> where([c], c.uuid == ^uuid))
assert params == [<<0, 1, 2, 3, 4, 5, 70, 7, 136, 9, 10, 11, 12, 13, 14, 15>>]
assert_raise Ecto.Query.CastError,
~r/`"00010203-0405-4607-8809"` cannot be dumped to type :binary_id/, fn ->
uuid = "00010203-0405-4607-8809"
plan(Comment |> where([c], c.uuid == ^uuid))
end
end
test "plan: casts and dumps custom types in left side of in-expressions" do
permalink = "1-hello-world"
{_query, params, _key} = plan(Post |> where([p], ^permalink in p.links))
assert params == [1]
message = ~r"value `\"1-hello-world\"` in `where` expected to be part of an array but matched type is :string"
assert_raise Ecto.Query.CastError, message, fn ->
plan(Post |> where([p], ^permalink in p.text))
end
end
test "plan: casts and dumps custom types in right side of in-expressions" do
datetime = ~N[2015-01-07 21:18:13.0]
{_query, params, _key} = plan(Comment |> where([c], c.posted in ^[datetime]))
assert params == [~N[2015-01-07 21:18:13]]
permalink = "1-hello-world"
{_query, params, _key} = plan(Post |> where([p], p.id in ^[permalink]))
assert params == [1]
datetime = ~N[2015-01-07 21:18:13.0]
{_query, params, _key} = plan(Comment |> where([c], c.posted in [^datetime]))
assert params == [~N[2015-01-07 21:18:13]]
permalink = "1-hello-world"
{_query, params, _key} = plan(Post |> where([p], p.id in [^permalink]))
assert params == [1]
{_query, params, _key} = plan(Post |> where([p], p.code in [^"abcd"]))
assert params == ["abcd"]
{_query, params, _key} = plan(Post |> where([p], p.code in ^["abcd"]))
assert params == ["abcd"]
end
test "plan: casts values on update_all" do
{_query, params, _key} = plan(Post |> update([p], set: [id: ^"1"]), :update_all)
assert params == [1]
{_query, params, _key} = plan(Post |> update([p], set: [title: ^nil]), :update_all)
assert params == [nil]
{_query, params, _key} = plan(Post |> update([p], set: [title: nil]), :update_all)
assert params == []
end
test "plan: joins" do
query = from(p in Post, join: c in "comments") |> plan |> elem(0)
assert hd(query.joins).source == {"comments", nil}
query = from(p in Post, join: c in Comment) |> plan |> elem(0)
assert hd(query.joins).source == {"comments", Comment}
query = from(p in Post, join: c in {"post_comments", Comment}) |> plan |> elem(0)
assert hd(query.joins).source == {"post_comments", Comment}
end
test "plan: joins associations" do
query = from(p in Post, join: assoc(p, :comments)) |> plan |> elem(0)
assert %JoinExpr{on: on, source: source, assoc: nil, qual: :inner} = hd(query.joins)
assert source == {"comments", Comment}
assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"
query = from(p in Post, left_join: assoc(p, :comments)) |> plan |> elem(0)
assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
assert source == {"comments", Comment}
assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"
query = from(p in Post, left_join: c in assoc(p, :comments), on: p.title == c.text) |> plan |> elem(0)
assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
assert source == {"comments", Comment}
assert Macro.to_string(on.expr) == "&1.post_id() == &0.id() and &0.title() == &1.text()"
end
test "plan: nested joins associations" do
query = from(c in Comment, left_join: assoc(c, :post_comments)) |> plan |> elem(0)
assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
assert [join1, join2] = query.joins
assert Enum.map(query.joins, & &1.ix) == [2, 1]
assert Macro.to_string(join1.on.expr) == "&2.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&1.post_id() == &2.id()"
query = from(p in Comment, left_join: assoc(p, :post),
left_join: assoc(p, :post_comments)) |> plan |> elem(0)
assert {{"comments", _, _}, {"posts", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
assert [join1, join2, join3] = query.joins
assert Enum.map(query.joins, & &1.ix) == [1, 3, 2]
assert Macro.to_string(join1.on.expr) == "&1.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&3.id() == &0.post_id()"
assert Macro.to_string(join3.on.expr) == "&2.post_id() == &3.id()"
query = from(p in Comment, left_join: assoc(p, :post_comments),
left_join: assoc(p, :post)) |> plan |> elem(0)
assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}, {"posts", _, _}} = query.sources
assert [join1, join2, join3] = query.joins
assert Enum.map(query.joins, & &1.ix) == [3, 1, 2]
assert Macro.to_string(join1.on.expr) == "&3.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&1.post_id() == &3.id()"
assert Macro.to_string(join3.on.expr) == "&2.id() == &0.post_id()"
end
test "plan: joins associations with custom queries" do
query = from(p in Post, left_join: assoc(p, :special_comments)) |> plan |> elem(0)
assert {{"posts", _, _}, {"comments", _, _}} = query.sources
assert [join] = query.joins
assert join.ix == 1
assert Macro.to_string(join.on.expr) =~
~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
end
test "plan: nested joins associations with custom queries" do
query = from(p in Post,
join: c1 in assoc(p, :special_comments),
join: p2 in assoc(c1, :post),
join: cp in assoc(c1, :comment_posts),
join: c2 in assoc(cp, :special_comment))
|> plan
|> elem(0)
assert [join1, join2, join3, join4] = query.joins
assert {{"posts", _, _}, {"comments", _, _}, {"posts", _, _},
{"comment_posts", _, _}, {"comments", _, _}} = query.sources
assert Macro.to_string(join1.on.expr) =~
~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
assert Macro.to_string(join2.on.expr) == "&2.id() == &1.post_id()"
assert Macro.to_string(join3.on.expr) == "&3.comment_id() == &1.id()"
assert Macro.to_string(join4.on.expr) == "&4.id() == &3.special_comment_id() and is_nil(&4.text())"
end
test "plan: cannot associate without schema" do
query = from(p in "posts", join: assoc(p, :comments))
message = ~r"cannot perform association join on \"posts\" because it does not have a schema"
assert_raise Ecto.QueryError, message, fn ->
plan(query)
end
end
test "plan: requires an association field" do
query = from(p in Post, join: assoc(p, :title))
assert_raise Ecto.QueryError, ~r"could not find association `title`", fn ->
plan(query)
end
end
test "plan: generates a cache key" do
{_query, _params, key} = plan(from(Post, []))
assert key == [:all, 0, {"posts", Post, 27727487, "my_prefix"}]
query =
from(
p in Post,
prefix: "hello",
select: 1,
lock: "foo",
where: is_nil(nil),
or_where: is_nil(nil),
join: c in Comment,
prefix: "world",
preload: :comments
)
{_query, _params, key} = plan(%{query | prefix: "foo"})
assert key == [:all, 0,
{:lock, "foo"},
{:prefix, "foo"},
{:where, [{:and, {:is_nil, [], [nil]}}, {:or, {:is_nil, [], [nil]}}]},
{:join, [{:inner, {"comments", Comment, 38292156, "world"}, true}]},
{"posts", Post, 27727487, "hello"},
{:select, 1}]
end
test "plan: generates a cache key for in based on the adapter" do
query = from(p in Post, where: p.id in ^[1, 2, 3])
{_query, _params, key} = Planner.plan(query, :all, Ecto.TestAdapter, 0)
assert key == :nocache
end
test "plan: normalizes prefixes" do
# No schema prefix in from
{query, _, _} = from(Comment, select: 1) |> plan()
assert query.sources == {{"comments", Comment, nil}}
{query, _, _} = from(Comment, select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}}
{query, _, _} = from(Comment, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "local"}}
# Schema prefix in from
{query, _, _} = from(Post, select: 1) |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
{query, _, _} = from(Post, select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "my_prefix"}}
{query, _, _} = from(Post, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"posts", Post, "local"}}
# No schema prefix in join
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> plan()
assert query.sources == {{"comments", Comment, nil}, {"comment_posts", CommentPost, nil}}
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "global"}}
{query, _, _} = from(c in Comment, join: assoc(c, :comment_posts), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "local"}}
# Schema prefix in join
{query, _, _} = from(c in Comment, join: assoc(c, :post)) |> plan()
assert query.sources == {{"comments", Comment, nil}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: assoc(c, :post)) |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "my_prefix"}}
{query, _, _} = from(c in Comment, join: assoc(c, :post), prefix: "local") |> Map.put(:prefix, "global") |> plan()
assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "local"}}
end
test "prepare: prepare combination queries" do
{%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment)) |> plan()
assert query.sources == {{"comments", Comment, nil}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert [:all, _, {:union, _}, _] = cache
{%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment, where: c in ^[1, 2, 3])) |> plan()
assert query.sources == {{"comments", Comment, nil}}
assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
assert :nocache = cache
end
test "normalize: validates literal types" do
assert_raise Ecto.QueryError, fn ->
Comment |> where([c], c.text == 123) |> normalize()
end
assert_raise Ecto.QueryError, fn ->
Comment |> where([c], c.text == '123') |> normalize()
end
end
test "normalize: tagged types" do
{query, params, _select} = from(Post, []) |> select([p], type(^"1", :integer))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
{query, params, _select} = from(Post, []) |> select([p], type(^"1", Custom.Permalink))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :id, value: {:^, [], [0]}, tag: Custom.Permalink}
assert params == [1]
{query, params, _select} = from(Post, []) |> select([p], type(^"1", p.visits))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
assert_raise Ecto.Query.CastError, ~r/value `"1"` in `select` cannot be cast to type Ecto.UUID/, fn ->
from(Post, []) |> select([p], type(^"1", Ecto.UUID)) |> normalize
end
end
test "normalize: assoc join with wheres that have regular filters" do
{_query, params, _select} =
from(post in Post,
join: comment in assoc(post, :crazy_comments),
join: post in assoc(comment, :crazy_post)) |> normalize_with_params()
assert params == ["crazycomment", "crazypost"]
end
test "normalize: assoc join with wheres that have in filters" do
{_query, params, _select} =
from(post in Post,
join: comment in assoc(post, :crazy_comments_with_list),
join: post in assoc(comment, :crazy_post_with_list)) |> normalize_with_params()
assert params == ["crazycomment1", "crazycomment2", "crazypost1", "crazypost2"]
{query, params, _} =
Ecto.assoc(%Comment{crazy_post_id: 1}, :crazy_post_with_list)
|> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() == ^0 and &0.post_title() in ^(1, 2)"
assert params == [1, "crazypost1", "crazypost2"]
end
test "normalize: dumps in query expressions" do
assert_raise Ecto.QueryError, ~r"cannot be dumped", fn ->
normalize(from p in Post, where: p.posted == "2014-04-17 00:00:00")
end
end
test "normalize: validate fields" do
message = ~r"field `unknown` in `select` does not exist in schema Ecto.Query.PlannerTest.Comment"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> select([c], c.unknown)
normalize(query)
end
message = ~r"field `temp` in `select` is a virtual field in schema Ecto.Query.PlannerTest.Comment"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> select([c], c.temp)
normalize(query)
end
end
test "normalize: validate fields in left side of in expressions" do
query = from(Post, []) |> where([p], p.id in [1, 2, 3])
normalize(query)
message = ~r"value `\[1, 2, 3\]` cannot be dumped to type \{:array, :string\}"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> where([c], c.text in [1, 2, 3])
normalize(query)
end
end
test "normalize: flattens and expands right side of in expressions" do
{query, params, _select} = where(Post, [p], p.id in [1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [1, 2, 3]"
assert params == []
{query, params, _select} = where(Post, [p], p.id in [^1, 2, ^3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [^0, 2, ^1]"
assert params == [1, 3]
{query, params, _select} = where(Post, [p], p.id in ^[]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 0)"
assert params == []
{query, params, _select} = where(Post, [p], p.id in ^[1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 3)"
assert params == [1, 2, 3]
{query, params, _select} = where(Post, [p], p.title == ^"foo" and p.id in ^[1, 2, 3] and
p.title == ^"bar") |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) ==
"&0.post_title() == ^0 and &0.id() in ^(1, 3) and &0.post_title() == ^4"
assert params == ["foo", 1, 2, 3, "bar"]
end
test "normalize: reject empty order by and group by" do
query = order_by(Post, [], []) |> normalize()
assert query.order_bys == []
query = order_by(Post, [], ^[]) |> normalize()
assert query.order_bys == []
query = group_by(Post, [], []) |> normalize()
assert query.group_bys == []
end
test "normalize: select" do
query = from(Post, []) |> normalize()
assert query.select.expr ==
{:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0)
query = from(Post, []) |> select([p], {p, p.title}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query = from(Post, []) |> select([p], {p.title, p}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query =
from(Post, [])
|> join(:inner, [_], c in Comment)
|> preload([_, c], comments: c)
|> select([p, _], {p.title, p})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0) ++
select_fields([:id, :text, :posted, :uuid, :crazy_comment, :post_id, :crazy_post_id], 1) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
end
test "normalize: select on schemaless" do
assert_raise Ecto.QueryError, ~r"need to explicitly pass a :select clause in query", fn ->
from("posts", []) |> normalize()
end
end
test "normalize: select with struct/2" do
assert_raise Ecto.QueryError, ~r"struct/2 in select expects a source with a schema", fn ->
"posts" |> select([p], struct(p, [:id, :title])) |> normalize()
end
query = Post |> select([p], struct(p, [:id, :title])) |> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields == select_fields([:id, :post_title], 0)
query = Post |> select([p], {struct(p, [:id, :title]), p.title}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], {p, struct(c, [:id, :text])})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0) ++
select_fields([:id, :text], 1)
end
test "normalize: select with struct/2 on assoc" do
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], struct(p, [:id, :title, comments: [:id, :text]]))
|> preload([p, c], comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1)
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], struct(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
|> preload([p, c], comments: {c, post: p}, extra_comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1) ++
select_fields([:id], 0) ++
select_fields([:id], 1)
end
test "normalize: select with map/2" do
query = Post |> select([p], map(p, [:id, :title])) |> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields == select_fields([:id, :post_title], 0)
query = Post |> select([p], {map(p, [:id, :title]), p.title}) |> normalize()
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
[{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], {p, map(c, [:id, :text])})
|> normalize()
assert query.select.fields ==
select_fields([:id, :post_title, :text, :code, :posted, :visits, :links], 0) ++
select_fields([:id, :text], 1)
end
test "normalize: select with map/2 on assoc" do
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], map(p, [:id, :title, comments: [:id, :text]]))
|> preload([p, c], comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1)
query =
Post
|> join(:inner, [_], c in Comment)
|> select([p, c], map(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
|> preload([p, c], comments: {c, post: p}, extra_comments: c)
|> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields ==
select_fields([:id, :post_title], 0) ++
select_fields([:id, :text], 1) ++
select_fields([:id], 0) ++
select_fields([:id], 1)
end
test "normalize: windows" do
assert_raise Ecto.QueryError, ~r"unknown window :v given to over/2", fn ->
Comment
|> windows([c], w: [partition_by: c.id])
|> select([c], count(c.id) |> over(:v))
|> normalize()
end
end
test "normalize: preload errors" do
message = ~r"the binding used in `from` must be selected in `select` when using `preload`"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload(:hello) |> select([p], p.title) |> normalize
end
message = ~r"invalid query has specified more bindings than"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload([p, c], comments: c) |> normalize
end
end
test "normalize: preload assoc merges" do
{_, _, select} =
from(p in Post)
|> join(:inner, [p], c in assoc(p, :comments))
|> join(:inner, [_, c], cp in assoc(c, :comment_posts))
|> join(:inner, [_, c], ip in assoc(c, :post))
|> preload([_, c, cp, _], comments: {c, comment_posts: cp})
|> preload([_, c, _, ip], comments: {c, post: ip})
|> normalize_with_params()
assert select.assocs == [comments: {1, [comment_posts: {2, []}, post: {3, []}]}]
end
test "normalize: preload assoc errors" do
message = ~r"field `Ecto.Query.PlannerTest.Post.not_field` in preload is not an association"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, join: c in assoc(p, :comments), preload: [not_field: c])
normalize(query)
end
message = ~r"requires an inner, left or lateral join, got right join"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, right_join: c in assoc(p, :comments), preload: [comments: c])
normalize(query)
end
end
test "normalize: fragments do not support preloads" do
query = from p in Post, join: c in fragment("..."), preload: [comments: c]
assert_raise Ecto.QueryError, ~r/can only preload sources with a schema/, fn ->
normalize(query)
end
end
test "normalize: all does not allow updates" do
message = ~r"`all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:all)
end
end
test "normalize: all does not allow bindings in order bys when having combinations" do
assert_raise Ecto.QueryError, ~r"cannot use bindings in `order_by` when using `union_all`", fn ->
posts_query = from(post in Post, select: post.id)
posts_query
|> union_all(^posts_query)
|> order_by([post], post.id)
|> normalize(:all)
end
end
test "normalize: update all only allow filters and checks updates" do
message = ~r"`update_all` requires at least one field to be updated"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: []) |> normalize(:update_all)
end
message = ~r"duplicate field `title` for `update_all`"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: [set: [title: "foo", title: "bar"]])
|> normalize(:update_all)
end
message = ~r"`update_all` allows only `where` and `join` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, order_by: p.title, update: [set: [title: "foo"]]) |> normalize(:update_all)
end
end
test "normalize: delete all only allow filters and forbids updates" do
message = ~r"`delete_all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:delete_all)
end
message = ~r"`delete_all` allows only `where` and `join` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, order_by: p.title) |> normalize(:delete_all)
end
end
end
| 38.843137 | 138 | 0.586337 |
ff1ba5f6d0f01aa02a802f922664652bec5ea4af | 565 | exs | Elixir | aoc-2019/day1/mix.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | aoc-2019/day1/mix.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | aoc-2019/day1/mix.exs | danurna/elixir-playground | 6acb40e513d8ab324368b3ec5151b0a4fd88f849 | [
"MIT"
] | null | null | null | defmodule Day1.MixProject do
use Mix.Project
def project do
[
app: :day1,
version: "0.1.0",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 19.482759 | 87 | 0.571681 |
ff1ba85675c1e237e186bbf9e01a2ae741963b8b | 145 | ex | Elixir | lib/pubsub/event.ex | michaeljguarino/piazza_core | 9cb24c59687db068cd16d2de94066dc85f871bc0 | [
"MIT"
] | null | null | null | lib/pubsub/event.ex | michaeljguarino/piazza_core | 9cb24c59687db068cd16d2de94066dc85f871bc0 | [
"MIT"
] | null | null | null | lib/pubsub/event.ex | michaeljguarino/piazza_core | 9cb24c59687db068cd16d2de94066dc85f871bc0 | [
"MIT"
] | null | null | null | defmodule Piazza.PubSub.Event do
# Injects a common event struct into the using module: the item the event
# concerns, the actor who triggered it, arbitrary context, and the pid of
# the emitting process.
defmacro __using__(_) do
  quote do
    defstruct [:item, :actor, :context, :source_pid]
  end
end
end | 20.714286 | 54 | 0.682759 |
ff1bb2c212bef6b901a39b4598b51f9a368cb7ef | 2,368 | exs | Elixir | test/acceptance/ast/line_breaks_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | test/acceptance/ast/line_breaks_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | test/acceptance/ast/line_breaks_test.exs | maartenvanvliet/earmark | 6e0e4ae202728fa7b1f0e3e10ef982a167120450 | [
"Apache-1.1"
] | null | null | null | defmodule Acceptance.Ast.LineBreaksTest do
use ExUnit.Case, async: true
import Support.Helpers, only: [as_ast: 1, parse_html: 1]
describe "Forced Line Breaks" do
test "with two spaces" do
markdown = "The \nquick"
html = "<p>The<br />quick</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "or more spaces" do
markdown = "The \nquick"
html = "<p>The<br />quick</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "or in some lines" do
markdown = "The \nquick \nbrown"
html = "<p>The<br />quick<br />brown</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "and in list items" do
markdown = "* The \nquick"
html = "<ul>\n<li>The<br />quick</li>\n</ul>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
end
describe "No Forced Line Breaks" do
test "with only one space" do
markdown = "The \nquick"
html = "<p>The \nquick</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "or whitspace lines" do
markdown = "The\n \nquick"
html = "<p>The</p>\n<p>quick</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, ast, messages}
end
test "or inside the line" do
markdown = "The quick\nbrown"
html = "<p>The quick\nbrown</p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "or at the end of input" do
markdown = "The\nquick "
html = "<p>The\nquick </p>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
test "or in code blocks" do
markdown = "```\nThe \nquick\n```"
html = "<pre><code>The \nquick</code></pre>\n"
ast = parse_html(html)
messages = []
assert as_ast(markdown) == {:ok, [ast], messages}
end
end
end
# SPDX-License-Identifier: Apache-2.0
| 27.534884 | 58 | 0.538851 |
ff1bb78555e1d3b5b5d95691256f47fc27f820a4 | 704 | exs | Elixir | .credo.exs | kianmeng/ex_health | f7311d5e23e8cfbb6dd91381a52a420345331535 | [
"MIT"
] | 9 | 2019-03-16T21:24:57.000Z | 2021-12-29T21:31:16.000Z | .credo.exs | kianmeng/ex_health | f7311d5e23e8cfbb6dd91381a52a420345331535 | [
"MIT"
] | 13 | 2018-12-11T16:43:26.000Z | 2022-02-09T23:08:01.000Z | .credo.exs | kianmeng/ex_health | f7311d5e23e8cfbb6dd91381a52a420345331535 | [
"MIT"
] | 4 | 2019-09-18T00:54:04.000Z | 2021-05-09T05:30:47.000Z | %{
configs: [
%{
name: "default",
files: %{
included: ["lib/", "src/"],
excluded: ["test"]
},
checks: [
{Credo.Check.Consistency.TabsOrSpaces},
# For others you can also set parameters
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
{Credo.Check.Design.TagTODO, exit_status: 2},
# To deactivate a check:
# Put `false` as second element:
{Credo.Check.Design.TagFIXME, false},
]
}
]
} | 27.076923 | 81 | 0.5625 |
ff1bfd337e91f0ff1dde91f961b664cb329df337 | 862 | exs | Elixir | mix.exs | sanderhahn/nimble_totp | 1df26cb7470bc1fee34ee449f2ee222f0c02852e | [
"Apache-2.0"
] | 238 | 2020-07-21T23:52:11.000Z | 2022-03-31T08:23:32.000Z | mix.exs | sanderhahn/nimble_totp | 1df26cb7470bc1fee34ee449f2ee222f0c02852e | [
"Apache-2.0"
] | 13 | 2020-08-10T08:37:24.000Z | 2022-02-02T07:42:27.000Z | mix.exs | sanderhahn/nimble_totp | 1df26cb7470bc1fee34ee449f2ee222f0c02852e | [
"Apache-2.0"
] | 12 | 2020-08-10T08:23:35.000Z | 2022-02-24T06:59:50.000Z | defmodule NimbleTOTP.MixProject do
use Mix.Project
@version "0.1.2"
@repo_url "https://github.com/dashbitco/nimble_totp"
  # Mix project definition, including Hex package metadata and ExDoc
  # configuration (see package/0 and docs/0 below).
  def project do
    [
      app: :nimble_totp,
      version: @version,
      elixir: "~> 1.6",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      # Hex
      package: package(),
      description: "A tiny library for time-based one time passwords (TOTP)",
      # Docs
      name: "NimbleTOTP",
      docs: docs()
    ]
  end
  # OTP application configuration; :crypto is required for the HMAC
  # computation at runtime.
  def application do
    [
      extra_applications: [:crypto]
    ]
  end
  # ex_doc is only needed to build documentation, hence the :docs env.
  defp deps do
    [
      {:ex_doc, ">= 0.19.0", only: :docs}
    ]
  end
  # Hex package metadata: license and source link.
  defp package do
    [
      licenses: ["Apache-2.0"],
      links: %{"GitHub" => @repo_url}
    ]
  end
  # ExDoc configuration; the source ref is pinned to the current version tag.
  defp docs do
    [
      main: "NimbleTOTP",
      source_ref: "v#{@version}",
      source_url: @repo_url
    ]
  end
end
| 16.576923 | 77 | 0.544084 |
ff1c2323bc8d74425b0ddb6e59821e6f9f24034c | 28,832 | ex | Elixir | lib/xema/validator.ex | romul/xema | 8273e10645cf54e8765a197b1ff0c097994275d9 | [
"MIT"
] | null | null | null | lib/xema/validator.ex | romul/xema | 8273e10645cf54e8765a197b1ff0c097994275d9 | [
"MIT"
] | null | null | null | lib/xema/validator.ex | romul/xema | 8273e10645cf54e8765a197b1ff0c097994275d9 | [
"MIT"
] | null | null | null | defmodule Xema.Validator do
@moduledoc """
This module contains all validators to check data against a schema.
"""
use Xema.Format
import Xema.Utils
alias Xema.{Behaviour, Ref, Schema}
@compile {
:inline,
do_validate: 3, get_type: 1, struct?: 1, struct?: 2, type?: 2, types: 2, validate_by: 4
}
@type result :: :ok | {:error, map}
@types [
:atom,
:struct,
:boolean,
:float,
:integer,
:keyword,
:list,
:map,
nil,
:number,
:string,
:tuple
]
@doc """
A callback for custom validators. For an example see:
[Custom validators](examples.html#custom-validator)
"""
@callback validate(any) :: :ok | result
  @doc """
  Validates `data` against the given `schema`.

  Returns `:ok` or `{:error, reason}` where `reason` is a map describing
  the failed keywords.
  """
  @spec validate(Behaviour.t() | Schema.t(), any) :: result
  # Delegates to validate/3 with empty options.
  def validate(schema, data), do: validate(schema, data, [])
  @doc false
  @spec validate(Behaviour.t() | Schema.t(), any, keyword) :: result
  # A bare %Schema{} is validated with the caller's opts unchanged.
  def validate(%Schema{} = schema, data, opts),
    do: do_validate(schema, data, opts)
  # A wrapper struct exposing a :schema field (a Xema): seed :root and
  # :master in opts (without overriding caller-provided values) so that
  # refs can resolve against the enclosing schema.
  def validate(%{schema: schema} = xema, data, opts),
    do:
      do_validate(
        schema,
        data,
        opts
        |> Keyword.put_new(:root, xema)
        |> Keyword.put_new(:master, xema)
      )
  @spec do_validate(Behaviour.t() | Schema.t(), any, keyword) :: result
  # Boolean schemas: `true` accepts any value, `false` rejects any value.
  defp do_validate(%Schema{type: true}, _, _), do: :ok
  defp do_validate(%Schema{type: false}, _, _), do: {:error, %{type: false}}
  # Multiple allowed types: find the first matching type, then run the
  # generic (:default) checks, the type-specific checks, and any custom
  # validator, failing on the first error.
  defp do_validate(%Schema{type: types} = schema, value, opts) when is_list(types) do
    with {:ok, type} <- types(schema, value),
         :ok <- validate_by(:default, schema, value, opts),
         :ok <- validate_by(type, schema, value, opts),
         :ok <- custom_validator(schema, value),
         do: :ok
  end
  # :any without a ref: derive the concrete type from the value itself so
  # the type-specific keywords still apply.
  defp do_validate(%Schema{type: :any, ref: nil} = schema, value, opts) do
    with type <- get_type(value),
         :ok <- validate_by(:default, schema, value, opts),
         :ok <- validate_by(type, schema, value, opts),
         :ok <- custom_validator(schema, value),
         do: :ok
  end
  # :any with a ref delegates entirely to the referenced schema.
  defp do_validate(%Schema{type: :any, ref: ref}, value, opts), do: Ref.validate(ref, value, opts)
  # Single concrete type: check the type first, then the keyword checks.
  defp do_validate(%Schema{type: type} = schema, value, opts) do
    with :ok <- type(schema, value),
         :ok <- validate_by(:default, schema, value, opts),
         :ok <- validate_by(type, schema, value, opts),
         :ok <- custom_validator(schema, value),
         do: :ok
  end
  # Unwrap a wrapper struct carrying a :schema field.
  defp do_validate(%{schema: schema}, value, opts),
    do: do_validate(schema, value, opts)
  # Dispatches the keyword checks applicable to a given type. :default
  # covers the keywords valid for every type (enum, not, allOf/anyOf/oneOf,
  # const, if/then/else).
  defp validate_by(:default, schema, value, opts) do
    with :ok <- enum(schema, value),
         :ok <- not_(schema, value, opts),
         :ok <- all_of(schema, value, opts),
         :ok <- any_of(schema, value, opts),
         :ok <- one_of(schema, value, opts),
         :ok <- const(schema, value),
         :ok <- if_then_else(schema, value, opts),
         do: :ok
  end
  # String keywords: length bounds, regex pattern, semantic format.
  defp validate_by(:string, schema, value, _opts) do
    with :ok <- min_length(schema, value),
         :ok <- max_length(schema, value),
         :ok <- pattern(schema, value),
         :ok <- format(schema, value),
         do: :ok
  end
  # Tuples are validated with the same keywords as lists.
  defp validate_by(:tuple, schema, value, opts),
    do: validate_by(:list, schema, value, opts)
  # List keywords: item count bounds, uniqueness, per-item schemas, contains.
  defp validate_by(:list, schema, value, opts) do
    with :ok <- min_items(schema, value),
         :ok <- max_items(schema, value),
         :ok <- unique(schema, value),
         :ok <- items(schema, value, opts),
         :ok <- contains(schema, value, opts),
         do: :ok
  end
  # A struct must match the configured module, then behaves like a map.
  defp validate_by(:struct, schema, value, opts) do
    with :ok <- module(schema, value),
         :ok <- validate_by(:map, schema, value, opts),
         do: :ok
  end
  # Map keywords. patterns/2 and properties/2 each return the map with
  # the properties they validated removed; only keys rejected by BOTH
  # (their intersection) count as "additional" properties.
  defp validate_by(:map, schema, value, opts) do
    with :ok <- size(schema, value),
         :ok <- keys(schema, value),
         :ok <- required(schema, value),
         :ok <- property_names(schema, value, opts),
         :ok <- dependencies(schema, value, opts),
         {:ok, patts_rest} <- patterns(schema, value, opts),
         {:ok, props_rest} <- properties(schema, value, opts),
         value <- intersection(props_rest, patts_rest),
         :ok <- additionals(schema, value, opts),
         do: :ok
  end
  # Keyword lists: dependencies run on the raw list, then the list is
  # converted to a map and checked like one.
  defp validate_by(:keyword, schema, value, opts) do
    with :ok <- dependencies(schema, value, opts),
         value <- Enum.into(value, %{}),
         :ok <- size(schema, value),
         :ok <- keys(schema, value),
         :ok <- required(schema, value),
         :ok <- property_names(schema, value, opts),
         {:ok, patts_rest} <- patterns(schema, value, opts),
         {:ok, props_rest} <- properties(schema, value, opts),
         value <- intersection(props_rest, patts_rest),
         :ok <- additionals(schema, value, opts),
         do: :ok
  end
  # Integers and floats share the numeric keyword checks.
  defp validate_by(:integer, schema, value, opts),
    do: validate_by(:number, schema, value, opts)
  defp validate_by(:float, schema, value, opts),
    do: validate_by(:number, schema, value, opts)
  defp validate_by(:number, schema, value, opts) do
    with :ok <- minimum(schema, value),
         :ok <- maximum(schema, value),
         :ok <- exclusive_maximum(schema, value),
         :ok <- exclusive_minimum(schema, value),
         :ok <- multiple_of(schema, value),
         :ok <- validate_by(:default, schema, value, opts),
         do: :ok
  end
  # Types without type-specific keywords.
  defp validate_by(:boolean, _schema, _value, _opts), do: :ok
  defp validate_by(nil, _schema, _value, _opts), do: :ok
  defp validate_by(:atom, _schema, _value, _opts), do: :ok
  #
  # Schema type handling
  #
  # Returns the schema type atom matching a runtime value. [] is matched
  # explicitly because Keyword.keyword?([]) is true and @types lists
  # :keyword before :list.
  defp get_type([]), do: :list
  defp get_type(value),
    do: Enum.find(@types, fn type -> type?(type, value) end)
  # Checks that `value` is of the schema's single configured type.
  @spec type(Schema.t() | atom, any) :: result
  defp type(%{type: type}, value) do
    case type?(type, value) do
      true -> :ok
      false -> {:error, %{type: type, value: value}}
    end
  end
  # Type predicate per supported schema type atom.
  @spec type?(atom, any) :: boolean
  defp type?(:any, _value), do: true
  defp type?(:atom, value), do: is_atom(value)
  defp type?(:boolean, value), do: is_boolean(value)
  defp type?(:string, value), do: is_binary(value)
  defp type?(:tuple, value), do: is_tuple(value)
  defp type?(:keyword, value), do: Keyword.keyword?(value)
  defp type?(:number, value), do: is_number(value)
  defp type?(:integer, value), do: is_integer(value)
  defp type?(:float, value), do: is_float(value)
  defp type?(:map, value), do: is_map(value)
  defp type?(:list, value), do: is_list(value)
  defp type?(:struct, value), do: is_map(value) && struct?(value)
  defp type?(nil, nil), do: true
  defp type?(_, _), do: false
  # True for any struct.
  @spec struct?(any) :: boolean
  defp struct?(%_{}), do: true
  defp struct?(_), do: false
  # True when the value is a struct of exactly the given module.
  @spec struct?(any, atom) :: boolean
  defp struct?(%module{}, module), do: true
  defp struct?(_, _), do: false
  # Finds the first type in the schema's type list that matches the value.
  @spec types(Schema.t(), any) :: {:ok, atom} | {:error, map}
  defp types(%{type: list}, value) do
    case Enum.find(list, :not_found, fn type -> type?(type, value) end) do
      :not_found -> {:error, %{type: list, value: value}}
      found -> {:ok, found}
    end
  end
  #
  # Validators
  #
  @spec const(Schema.t(), any) :: result
  # No const configured.
  defp const(%{const: nil}, _value), do: :ok
  # :__nil__ is the internal marker for a literal `const: nil`, since the
  # schema field itself uses nil for "not configured".
  defp const(%{const: :__nil__}, nil), do: :ok
  defp const(%{const: :__nil__}, value),
    do: {:error, %{const: nil, value: value}}
  # Exact structural match.
  defp const(%{const: const}, const), do: :ok
  # Numeric consts compare with ==, so e.g. const 1 accepts 1.0.
  defp const(%{const: const}, value) when is_number(const) do
    case const == value do
      true -> :ok
      false -> {:error, %{const: const, value: value}}
    end
  end
  defp const(%{const: const}, value),
    do: {:error, %{const: const, value: value}}
@spec if_then_else(Behaviour.t() | Schema.t(), any, keyword) :: result
defp if_then_else(%{if: nil}, _value, _opts), do: :ok
defp if_then_else(%{then: nil, else: nil}, _value, _opts), do: :ok
defp if_then_else(%{if: schema_if, then: schema_then, else: schema_else}, value, opts) do
case Xema.valid?(schema_if, value) do
true ->
if_then_else(:then, schema_then, value, opts)
false ->
if_then_else(:else, schema_else, value, opts)
end
end
@spec if_then_else(atom, Schema.t() | nil, any) :: result
defp if_then_else(_key, nil, _value, _opts), do: :ok
defp if_then_else(key, schema, value, opts) do
case do_validate(schema, value, opts) do
:ok -> :ok
{:error, reason} -> {:error, Map.new([{key, reason}])}
end
end
  @spec property_names(Behaviour.t() | Schema.t(), map, keyword) :: result
  defp property_names(%{property_names: nil}, _map, _opts), do: :ok
  # Validates every key name of the map against the propertyNames schema.
  # Atom keys are validated as their string form; keys of any other type
  # are ignored. Errors are accumulated and reported in key order.
  defp property_names(%{property_names: schema}, map, opts) do
    map
    |> Map.keys()
    |> Enum.reduce([], fn
      key, acc when is_binary(key) ->
        case do_validate(schema, key, opts) do
          :ok -> acc
          {:error, reason} -> [{key, reason} | acc]
        end
      key, acc when is_atom(key) ->
        case do_validate(schema, Atom.to_string(key), opts) do
          :ok -> acc
          {:error, reason} -> [{key, reason} | acc]
        end
      _, acc ->
        acc
    end)
    |> case do
      [] -> :ok
      errors -> {:error, %{value: Map.keys(map), property_names: Enum.reverse(errors)}}
    end
  end
  @spec enum(Schema.t(), any) :: result
  defp enum(%{enum: nil}, _element), do: :ok
  # Integers also match their float twin in the enum (1 matches 1.0).
  defp enum(%{enum: enum}, value) when is_integer(value) do
    case Enum.member?(enum, value) || Enum.member?(enum, value * 1.0) do
      true -> :ok
      false -> {:error, %{enum: enum, value: value}}
    end
  end
  # Floats with a zero fraction also match their integer twin (1.0 matches 1).
  defp enum(%{enum: enum}, value) when is_float(value) do
    case Enum.member?(enum, value) do
      true ->
        :ok
      false ->
        case zero_terminated_float?(value) && Enum.member?(enum, trunc(value)) do
          true -> :ok
          false -> {:error, %{enum: enum, value: value}}
        end
    end
  end
  # All other values require exact membership.
  defp enum(%{enum: enum}, value) do
    case Enum.member?(enum, value) do
      true -> :ok
      false -> {:error, %{enum: enum, value: value}}
    end
  end
  # True for floats like 2.0 whose fractional part is zero.
  defp zero_terminated_float?(value) when is_float(value), do: Float.round(value) == value
@spec module(Schema.t(), any) :: result
defp module(%{module: nil}, _val), do: :ok
defp module(%{module: module}, val) do
case struct?(val, module) do
true -> :ok
false -> {:error, %{module: module, value: val}}
end
end
@spec not_(Schema.t(), any, keyword) :: result
defp not_(%{not: nil}, _value, _opts), do: :ok
defp not_(%{not: schema}, value, opts) do
case do_validate(schema, value, opts) do
:ok -> {:error, %{not: :ok, value: value}}
_ -> :ok
end
end
  @spec any_of(Schema.t(), any, keyword) :: result
  defp any_of(%{any_of: nil}, _value, _opts), do: :ok
  # anyOf: at least one sub-schema must match; on failure all collected
  # errors are reported in schema order.
  defp any_of(%{any_of: schemas}, value, opts) do
    case do_any_of(schemas, value, opts) do
      :ok ->
        :ok
      {:error, errors} ->
        {:error, %{any_of: Enum.reverse(errors), value: value}}
    end
  end
  @spec do_any_of(list, any, keyword, [map]) :: :ok | {:error, list(map)}
  defp do_any_of(schemas, value, opts, errors \\ [])
  defp do_any_of([], _value, _opts, errors), do: {:error, errors}
  # Short-circuits on the first matching schema (the `with` returns :ok);
  # otherwise accumulates the error and tries the next schema.
  defp do_any_of([schema | schemas], value, opts, errors) do
    with {:error, error} <- do_validate(schema, value, opts) do
      do_any_of(schemas, value, opts, [error | errors])
    end
  end
  @spec all_of(Schema.t(), any, keyword) :: result
  defp all_of(%{all_of: nil}, _value, _opts), do: :ok
  # allOf: every sub-schema must match; every failure is collected.
  defp all_of(%{all_of: schemas}, value, opts) do
    with {:error, errors} <- do_all_of(schemas, value, opts) do
      {:error, %{all_of: errors, value: value}}
    end
  end
  @spec do_all_of(list, any, keyword, [map]) :: result
  defp do_all_of(schemas, value, opts, errors \\ [])
  defp do_all_of([], _value, _opts, []), do: :ok
  defp do_all_of([], _value, _opts, errors),
    do: {:error, Enum.reverse(errors)}
  defp do_all_of([schema | schemas], value, opts, errors) do
    case do_validate(schema, value, opts) do
      :ok ->
        do_all_of(schemas, value, opts, errors)
      {:error, error} ->
        do_all_of(schemas, value, opts, [error | errors])
    end
  end
  @spec one_of(Schema.t(), any, keyword) :: result
  defp one_of(%{one_of: nil}, _value, _opts), do: :ok
  # oneOf: exactly one sub-schema must match. Zero matches report the
  # collected errors; multiple matches report the indices that matched.
  defp one_of(%{one_of: schemas}, value, opts) do
    case do_one_of(schemas, value, opts) do
      %{success: [], errors: errors} ->
        {:error, %{one_of: {:error, Enum.reverse(errors)}, value: value}}
      %{success: [_]} ->
        :ok
      %{success: success} ->
        {:error, %{one_of: {:ok, Enum.reverse(success)}, value: value}}
    end
  end
  # Runs every schema, partitioning results into matching indices and errors.
  @spec do_one_of(list, any, keyword) :: %{errors: [map], success: [map]}
  defp do_one_of(schemas, value, opts),
    do:
      schemas
      |> Enum.with_index()
      |> Enum.reduce(
        %{errors: [], success: []},
        fn {schema, index}, %{errors: errors, success: success} ->
          case do_validate(schema, value, opts) do
            :ok ->
              %{errors: errors, success: [index | success]}
            {:error, error} ->
              %{errors: [error | errors], success: success}
          end
        end
      )
  @spec exclusive_maximum(Schema.t(), any) :: result
  defp exclusive_maximum(%{exclusive_maximum: nil}, _value), do: :ok
  # A boolean exclusive_maximum is the draft-4 form and is handled
  # together with :maximum in maximum/2 below.
  defp exclusive_maximum(%{exclusive_maximum: max}, _value)
       when is_boolean(max),
       do: :ok
  defp exclusive_maximum(%{exclusive_maximum: max}, value)
       when value < max,
       do: :ok
  defp exclusive_maximum(%{exclusive_maximum: max}, value),
    do: {:error, %{exclusive_maximum: max, value: value}}
  @spec exclusive_minimum(Schema.t(), any) :: result
  defp exclusive_minimum(%{exclusive_minimum: nil}, _value), do: :ok
  defp exclusive_minimum(%{exclusive_minimum: min}, _value)
       when is_boolean(min),
       do: :ok
  defp exclusive_minimum(%{exclusive_minimum: min}, value)
       when value > min,
       do: :ok
  defp exclusive_minimum(%{exclusive_minimum: min}, value),
    do: {:error, %{value: value, exclusive_minimum: min}}
  @spec minimum(Schema.t(), any) :: result
  defp minimum(%{minimum: nil}, _value), do: :ok
  # Non-numbers fall through to the catch-all clause; the type check has
  # its own error.
  defp minimum(
         %{minimum: minimum, exclusive_minimum: exclusive_minimum},
         value
       )
       when is_number(value),
       do: minimum(minimum, exclusive_minimum, value)
  defp minimum(_, _), do: :ok
  # minimum/3: the boolean `exclusive` flag (draft-4 style) decides
  # whether equality with the bound is acceptable.
  @spec minimum(number, boolean, number) :: result
  defp minimum(minimum, _exclusive, value) when value > minimum, do: :ok
  defp minimum(minimum, nil, value) when value == minimum, do: :ok
  defp minimum(minimum, false, value) when value == minimum, do: :ok
  defp minimum(minimum, nil, value),
    do: {:error, %{value: value, minimum: minimum}}
  defp minimum(minimum, exclusive, value),
    do: {:error, %{value: value, minimum: minimum, exclusive_minimum: exclusive}}
  @spec maximum(Schema.t(), any) :: result
  defp maximum(%{maximum: nil}, _value), do: :ok
  defp maximum(
         %{maximum: maximum, exclusive_maximum: exclusive_maximum},
         value
       ),
       do: maximum(maximum, exclusive_maximum, value)
  @spec maximum(number, boolean, number) :: result
  defp maximum(maximum, _exclusive, value) when value < maximum, do: :ok
  defp maximum(maximum, nil, value) when value == maximum, do: :ok
  defp maximum(maximum, false, value) when value == maximum, do: :ok
  defp maximum(maximum, nil, value),
    do: {:error, %{value: value, maximum: maximum}}
  defp maximum(maximum, exclusive, value),
    do: {:error, %{value: value, maximum: maximum, exclusive_maximum: exclusive}}
  @spec multiple_of(Schema.t(), number) :: result
  defp multiple_of(%{multiple_of: nil} = _keywords, _value), do: :ok
  # A value is a multiple when the quotient has no fractional part.
  defp multiple_of(%{multiple_of: multiple_of}, value) when is_number(value) do
    x = value / multiple_of
    case x - Float.floor(x) do
      0.0 -> :ok
      _ -> {:error, %{value: value, multiple_of: multiple_of}}
    end
  end
  defp multiple_of(_, _), do: :ok
@spec min_length(Schema.t(), String.t()) :: result
defp min_length(%{min_length: nil}, _), do: :ok
defp min_length(%{min_length: min}, value) do
len = String.length(value)
case len >= min do
true -> :ok
false -> {:error, %{value: value, min_length: min}}
end
end
@spec max_length(Schema.t(), String.t()) :: result
defp max_length(%{max_length: nil}, _), do: :ok
defp max_length(%{max_length: max}, value) do
len = String.length(value)
case len <= max do
true -> :ok
false -> {:error, %{value: value, max_length: max}}
end
end
  @spec pattern(Schema.t(), String.t()) :: result
  defp pattern(%{pattern: nil}, _string), do: :ok
  # The string must match the configured regex.
  defp pattern(%{pattern: pattern}, string) do
    case Regex.match?(pattern, string) do
      true -> :ok
      false -> {:error, %{value: string, pattern: pattern}}
    end
  end
  # NOTE: size/1 below is imported from Xema.Utils (works for lists and
  # tuples); it is distinct from the local size/2 map validator.
  @spec min_items(Schema.t(), list | tuple) :: result
  defp min_items(%{min_items: nil}, _), do: :ok
  defp min_items(%{min_items: min}, val) do
    case size(val) >= min do
      true -> :ok
      false -> {:error, %{value: val, min_items: min}}
    end
  end
  @spec max_items(Schema.t(), list | tuple) :: result
  defp max_items(%{max_items: nil}, _list), do: :ok
  defp max_items(%{max_items: max}, val) do
    case size(val) <= max do
      true -> :ok
      false -> {:error, %{value: val, max_items: max}}
    end
  end
  @spec unique(Schema.t(), list | tuple) :: result
  defp unique(%{unique_items: nil}, _list), do: :ok
  defp unique(%{unique_items: true}, list) when is_list(list) do
    case unique?(list) do
      true -> :ok
      false -> {:error, %{value: list, unique_items: true}}
    end
  end
  # Tuples are checked via their list form but reported as tuples.
  defp unique(%{unique_items: true}, tuple) when is_tuple(tuple) do
    tuple
    |> Tuple.to_list()
    |> unique?()
    |> case do
      true -> :ok
      false -> {:error, %{value: tuple, unique_items: true}}
    end
  end
  # Single-pass duplicate detection using a map as a seen-set.
  @spec unique?(list, map) :: boolean
  defp unique?(list, set \\ %{})
  defp unique?([], _), do: true
  defp unique?([h | t], set) do
    case set do
      %{^h => true} -> false
      _ -> unique?(t, Map.put(set, h, true))
    end
  end
  @spec contains(Schema.t(), any, keyword) :: result
  defp contains(%{contains: nil}, _, _), do: :ok
  # Tuples reuse the list implementation but report the original tuple.
  defp contains(%{contains: _} = schema, tuple, opts) when is_tuple(tuple) do
    with {:error, reason} <- contains(schema, Tuple.to_list(tuple), opts) do
      {:error, %{reason | value: tuple}}
    end
  end
  # `contains` succeeds when at least one element validates, i.e. when
  # fewer elements errored than the list contains. An empty list fails
  # (0 < 0 is false), matching JSON-Schema semantics.
  defp contains(%{contains: schema}, list, opts) when is_list(list) do
    errors =
      list
      |> Enum.with_index()
      |> Enum.reduce([], fn {value, index}, acc ->
        case do_validate(schema, value, opts) do
          :ok -> acc
          {:error, reason} -> [{index, reason} | acc]
        end
      end)
    case length(errors) < length(list) do
      true -> :ok
      false -> {:error, %{value: list, contains: Enum.reverse(errors)}}
    end
  end
  @spec items(Schema.t(), list | tuple, keyword) :: result
  defp items(%{items: nil}, _list, _opts), do: :ok
  defp items(schema, tuple, opts) when is_tuple(tuple),
    do: items(schema, Tuple.to_list(tuple), opts)
  # A list of schemas is positional ("tuple validation"); extra elements
  # are governed by :additional_items (default true = allowed).
  defp items(%{items: items} = schema, list, opts)
       when is_list(items),
       do:
         items_tuple(
           items,
           Map.get(schema, :additional_items, true),
           Enum.with_index(list),
           [],
           opts
         )
  # A single schema applies to every element.
  defp items(%{items: items}, list, opts),
    do: items_list(items, Enum.with_index(list), [], opts)
  # Validates each indexed element against one schema, collecting errors
  # keyed by index.
  @spec items_list(Schema.t(), [{any, integer}], list, keyword) :: result
  defp items_list(%{type: false}, [], _, _), do: :ok
  defp items_list(%{type: false}, _, _, _), do: {:error, %{type: false}}
  defp items_list(%{type: true}, _, _, _), do: :ok
  defp items_list(_schema, [], [], _opts), do: :ok
  defp items_list(_schema, [], errors, _opts),
    do: {:error, %{items: Enum.reverse(errors)}}
  defp items_list(schema, [{item, index} | list], errors, opts) do
    case do_validate(schema, item, opts) do
      :ok ->
        items_list(schema, list, errors, opts)
      {:error, reason} ->
        items_list(schema, list, [{index, reason} | errors], opts)
    end
  end
  # Positional validation: consumes one schema per element; once schemas
  # are exhausted, :additional_items decides how the remainder is treated
  # (false = forbidden, nil/true = allowed, schema = validated).
  @spec items_tuple(list, nil | boolean | Schema.t(), [{any, integer}], list, keyword) :: result
  defp items_tuple(_schemas, _additonal_items, [], [], _opts), do: :ok
  defp items_tuple(_schemas, _additonal_items, [], errors, _opts),
    do: {:error, %{items: Enum.reverse(errors)}}
  defp items_tuple([], false, [{_, index} | list], errors, opts),
    do:
      items_tuple(
        [],
        false,
        list,
        [{index, %{additional_items: false}} | errors],
        opts
      )
  defp items_tuple([], additional_items, _list, [], _opts)
       when additional_items in [nil, true],
       do: :ok
  defp items_tuple([], additional_items, _list, errors, _opts)
       when additional_items in [nil, true],
       do: {:error, %{items: Enum.reverse(errors)}}
  defp items_tuple([], schema, [{item, index} | list], errors, opts) do
    case do_validate(schema, item, opts) do
      :ok ->
        items_tuple([], schema, list, errors, opts)
      {:error, reason} ->
        items_tuple([], schema, list, [{index, reason} | errors], opts)
    end
  end
  defp items_tuple(
         [schema | schemas],
         additional_items,
         [{item, index} | list],
         errors,
         opts
       ) do
    case do_validate(schema, item, opts) do
      :ok ->
        items_tuple(schemas, additional_items, list, errors, opts)
      {:error, reason} ->
        items_tuple(
          schemas,
          additional_items,
          list,
          [{index, reason} | errors],
          opts
        )
    end
  end
@spec keys(Schema.t(), any) :: result
defp keys(%{keys: nil}, _value), do: :ok
defp keys(%{keys: :atoms}, map) do
case map |> Map.keys() |> Enum.all?(&is_atom/1) do
true -> :ok
false -> {:error, %{keys: :atoms, value: map}}
end
end
defp keys(%{keys: :strings}, map) do
case map |> Map.keys() |> Enum.all?(&is_binary/1) do
true -> :ok
false -> {:error, %{keys: :strings, value: map}}
end
end
  # On success returns {:ok, rest} where `rest` is the input map minus the
  # properties that were validated here (used later to detect additional
  # properties).
  @spec properties(Schema.t(), map, keyword) :: {:ok, map} | {:error, map}
  defp properties(%{properties: nil}, map, _opts), do: {:ok, map}
  defp properties(%{properties: props}, map, opts),
    do: do_properties(Map.to_list(props), map, %{}, opts)
  @spec do_properties(list, map, map, keyword) :: result
  defp do_properties([], map, errors, _opts) when errors == %{}, do: {:ok, map}
  defp do_properties([], _map, errors, _opts),
    do: {:error, %{properties: errors}}
  defp do_properties([{prop, schema} | props], map, errors, opts) do
    with {:ok, value} <- Map.fetch(map, prop),
         :ok <- do_validate(schema, value, opts) do
      # The list `props` can contain multiple schemas for the same value.
      # The value will be just deleted if one schema is available.
      # Multiple schemas are coming from `pattern_properties`.
      map =
        case has_key?(props, prop) do
          true -> map
          false -> Map.delete(map, prop)
        end
      do_properties(props, map, errors, opts)
    else
      # The property is not in the map.
      :error ->
        do_properties(props, map, errors, opts)
      {:error, reason} ->
        do_properties(
          props,
          Map.delete(map, prop),
          Map.put(errors, prop, reason),
          opts
        )
    end
  end
@spec required(Schema.t(), map) :: result
defp required(%{required: nil}, _map), do: :ok
defp required(%{required: required}, map) do
case Enum.filter(required, fn key -> !Map.has_key?(map, key) end) do
[] ->
:ok
missing ->
{
:error,
%{required: missing}
}
end
end
@spec size(Schema.t(), map) :: result
defp size(%{min_properties: nil, max_properties: nil}, _map), do: :ok
defp size(%{min_properties: min, max_properties: max}, map) do
do_size(length(Map.keys(map)), min, max, map)
end
@spec do_size(number, number, number, map) :: result
defp do_size(len, min, _max, map) when not is_nil(min) and len < min do
{:error, %{min_properties: min, value: map}}
end
defp do_size(len, _min, max, map) when not is_nil(max) and len > max do
{:error, %{max_properties: max, value: map}}
end
defp do_size(_len, _min, _max, _map), do: :ok
  # Builds a {key, schema} list for every map key matching a pattern and
  # reuses do_properties/4; like properties/3 it returns the map with the
  # matched keys removed.
  @spec patterns(Schema.t(), map, keyword) :: {:ok, map} | {:error, map}
  defp patterns(%{pattern_properties: nil}, map, _opts), do: {:ok, map}
  defp patterns(%{pattern_properties: patterns}, map, opts) do
    props =
      for {pattern, schema} <- Map.to_list(patterns),
          key <- Map.keys(map),
          key_match?(pattern, key),
          do: {key, schema}
    do_properties(props, map, %{}, opts)
  end
  # Atom keys are matched via their string form.
  @spec key_match?(Regex.t(), String.t() | atom) :: boolean
  defp key_match?(regex, atom) when is_atom(atom) do
    key_match?(regex, to_string(atom))
  end
  defp key_match?(regex, string), do: Regex.match?(regex, string)
  # Handles additionalProperties on the leftover map (keys not consumed by
  # properties/patternProperties).
  @spec additionals(Schema.t(), map, keyword) :: result
  # false: any remaining key is an error.
  defp additionals(%{additional_properties: false}, map, _opts) do
    case Map.equal?(map, %{}) do
      true ->
        :ok
      false ->
        {
          :error,
          %{
            properties:
              map
              |> Map.keys()
              |> Enum.into(%{}, fn x ->
                {x, %{additional_properties: false}}
              end)
          }
        }
    end
  end
  # A schema: every remaining value must validate against it.
  defp additionals(%{additional_properties: schema}, map, opts)
       when is_map(schema) do
    result =
      Enum.reduce(map, %{}, fn {key, value}, acc ->
        case do_validate(schema, value, opts) do
          :ok -> acc
          {:error, reason} -> Map.put(acc, key, reason)
        end
      end)
    case result == %{} do
      true -> :ok
      false -> {:error, %{properties: result}}
    end
  end
  # nil/true: additional properties are allowed.
  defp additionals(_schema, _map, _opts), do: :ok
  # JSON-Schema dependencies: only entries whose trigger key is present
  # in the data are checked.
  @spec dependencies(Schema.t(), map, keyword) :: result
  defp dependencies(%{dependencies: nil}, _map, _opts), do: :ok
  defp dependencies(%{dependencies: dependencies}, map, opts) do
    dependencies
    |> Enum.filter(fn {key, _} -> has_key?(map, key) end)
    |> do_dependencies(map, opts)
  end
  @spec do_dependencies(list, map, keyword) :: result
  defp do_dependencies([], _map, _opts), do: :ok
  # A list dependency names keys that must also be present.
  defp do_dependencies([{key, list} | tail], map, opts) when is_list(list) do
    with :ok <- do_dependencies_list(key, list, map, opts) do
      do_dependencies(tail, map, opts)
    end
  end
  # A schema dependency validates the whole map against the schema.
  defp do_dependencies([{key, schema} | tail], map, opts) do
    case do_validate(schema, map, opts) do
      :ok ->
        do_dependencies(tail, map, opts)
      {:error, reason} ->
        {:error, %{dependencies: %{key => reason}}}
    end
  end
  # Stops at the first missing dependent key.
  @spec do_dependencies_list(String.t() | atom, list, map, keyword) :: result
  defp do_dependencies_list(_key, [], _map, _opts), do: :ok
  defp do_dependencies_list(key, [dependency | dependencies], map, opts) do
    case has_key?(map, dependency) do
      true ->
        do_dependencies_list(key, dependencies, map, opts)
      false ->
        {:error, %{dependencies: %{key => dependency}}}
    end
  end
  # Semantic validation of strings.
  @spec format(Schema.t(), any) :: result
  defp format(%{format: nil}, _str), do: :ok
  # Only formats the Format module supports are checked (guard macro);
  # unknown formats fall through to the :ok catch-all below.
  defp format(%{format: fmt}, str) when Format.supports(fmt) do
    case Format.is?(fmt, str) do
      true -> :ok
      false -> {:error, %{format: fmt, value: str}}
    end
  end
  defp format(_, _str), do: :ok
  # Custom validator
  # Supports three forms: a 1-arity fun, an {module, function} pair, and a
  # module implementing the validate/1 callback of this behaviour. Each is
  # expected to return :ok or {:error, reason}.
  @spec custom_validator(Schema.t(), any) :: result
  defp custom_validator(%{validator: validator}, value)
       when is_function(validator, 1) do
    with {:error, reason} <- apply(validator, [value]) do
      {:error, %{validator: reason, value: value}}
    end
  end
  defp custom_validator(%{validator: {module, validator}}, value) do
    with {:error, reason} <- apply(module, validator, [value]) do
      {:error, %{validator: reason, value: value}}
    end
  end
  # `not is_nil` keeps a nil validator (no validator configured) out of
  # this clause; it falls through to the :ok catch-all.
  defp custom_validator(%{validator: behaviour}, value)
       when not is_nil(behaviour) and is_atom(behaviour) do
    with {:error, reason} <- apply(behaviour, :validate, [value]) do
      {:error, %{validator: reason, value: value}}
    end
  end
  defp custom_validator(_, _), do: :ok
# Returns a map containing only keys that `map_1` and `map_2` have in common.
# Values for the returned map are taken from `map_2`.
@spec intersection(map, map) :: map
defp intersection(map_1, map_2) when is_map(map_1) and is_map(map_2),
do:
for(
key <- Map.keys(map_1),
true == Map.has_key?(map_2, key),
into: %{},
do: {key, Map.get(map_2, key)}
)
end
| 29.480573 | 98 | 0.594791 |
ff1ca508590042917c2ce97a7c9726662a17e20c | 511 | ex | Elixir | lib/document_generator_web/views/error_view.ex | warpas/document_generator | 1d9238b4e934461a36cc5f275ae173b47962074b | [
"MIT"
] | null | null | null | lib/document_generator_web/views/error_view.ex | warpas/document_generator | 1d9238b4e934461a36cc5f275ae173b47962074b | [
"MIT"
] | null | null | null | lib/document_generator_web/views/error_view.ex | warpas/document_generator | 1d9238b4e934461a36cc5f275ae173b47962074b | [
"MIT"
] | null | null | null | defmodule DocumentGeneratorWeb.ErrorView do
use DocumentGeneratorWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
  # Fallback for error templates without a dedicated render clause:
  # derives the response text from the template name, e.g. "404.html"
  # becomes "Not Found".
  def template_not_found(template, _assigns) do
    Phoenix.Controller.status_message_from_template(template)
  end
end
| 30.058824 | 61 | 0.745597 |
ff1cb8f1ac63cbf600c180152d095a259670b0fd | 316 | ex | Elixir | lib/freshcom_web/controllers/refresh_token_controller.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 9 | 2018-12-16T14:02:59.000Z | 2021-01-19T07:25:40.000Z | lib/freshcom_web/controllers/refresh_token_controller.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom_web/controllers/refresh_token_controller.ex | freshcom/freshcom_web | fab44c9468e86b1770eef9971a97ad2b11545e9c | [
"BSD-3-Clause"
] | 4 | 2018-12-16T17:50:01.000Z | 2021-01-19T07:25:51.000Z | defmodule FreshcomWeb.RefreshTokenController do
use FreshcomWeb, :controller
alias Freshcom.Identity
action_fallback FreshcomWeb.FallbackController
# RetrieveRefreshToken
def show(conn, _) do
conn
|> build_request(:show)
|> Identity.get_api_key()
|> send_response(conn, :show)
end
end
| 19.75 | 48 | 0.740506 |
ff1cc299154a15162e4355eb102b5b0cf1990ec3 | 1,964 | exs | Elixir | config/dev.exs | highmobility/bluetooth-websocket-server | 512a6fab0db1821ddb99426c1bececa06b0b8dc1 | [
"MIT"
] | 10 | 2016-06-24T16:23:56.000Z | 2021-02-16T07:56:51.000Z | config/dev.exs | highmobility/bluetooth-websocket-server | 512a6fab0db1821ddb99426c1bececa06b0b8dc1 | [
"MIT"
] | 2 | 2017-10-15T11:28:54.000Z | 2017-10-19T11:58:40.000Z | config/dev.exs | highmobility/bluetooth-websocket-server | 512a6fab0db1821ddb99426c1bececa06b0b8dc1 | [
"MIT"
] | 2 | 2017-10-07T10:08:16.000Z | 2020-11-01T08:44:18.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :bluetooth_websocket_server, BluetoothWebsocketServerWeb.Endpoint,
https: [
port: 4000,
otp_app: :bluetooth_websocket_server,
keyfile: "priv/keys/localhost.key",
certfile: "priv/keys/localhost.cert",
],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../assets", __DIR__)]]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :bluetooth_websocket_server, BluetoothWebsocketServerWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/bluetooth_websocket_server_web/views/.*(ex)$},
~r{lib/bluetooth_websocket_server_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 35.709091 | 170 | 0.715377 |
ff1cc5f97f0c53a5056a6fc3ff1db5e567da2e9b | 4,636 | ex | Elixir | expect/screenos_login.ex | mmmonk/crap | 96ba81723f043503e7ed2f96ea727b524d22b83f | [
"MIT"
] | 14 | 2015-01-14T15:53:22.000Z | 2019-06-21T06:15:47.000Z | expect/screenos_login.ex | mmmonk/crap | 96ba81723f043503e7ed2f96ea727b524d22b83f | [
"MIT"
] | 1 | 2018-04-01T08:40:17.000Z | 2020-06-24T10:05:33.000Z | expect/screenos_login.ex | mmmonk/crap | 96ba81723f043503e7ed2f96ea727b524d22b83f | [
"MIT"
] | 12 | 2015-05-13T10:52:04.000Z | 2020-10-07T14:49:37.000Z | #!/usr/bin/expect --
# $Id: 20120812$
# $Date: 2012-08-12 08:02:52$
# $Author: Marek Lukaszuk$
proc help { } {
global ip
global name
send_user "\033\[1;31m
----- Help -----
Connected to: $name
Current Mgt IP is: $ip
Ctrl+a h - this help,
Ctrl+a ? - also this help,
Ctrl+a c - basic config,
Ctrl+a l - auto login,
Ctrl+a i - change the IP address,
Ctrl+a d - quit,\033\[m
"
}
proc curtime { } {
return [ timestamp -format "%Y/%m/%d %H:%M:%S"]
}
proc backuptimeproc { } {
return [ timestamp -format "%Y%m%d_%H%M%S"]
}
# set send_slow {2 .001}
set send_slow {10 .01}
set timeout 60
if { $argc == 0 } {
exit
} else {
set host [lindex $argv 0]
set port "23"
set name [lindex $argv 0]
if { $argc > 1 } {
set port [lindex $argv 1]
}
if { $argc > 2 } {
set name [lindex $argv 2]
}
}
set pass "netscreen"
set enab "netscreen"
set user "netscreen"
set prompt "*-> "
# we are connecting to the remote host
spawn telnet $host $port
send -s "\r"
expect timeout {
send_user "failed to connect - timeout\n"
exit
} eof {
send_user "failed to connect\n"
exit
} "login*" {
send -s "$user\r"
exp_continue
} "assword:*" {
send -s "$pass\r"
exp_continue
} "*-> " {
send -s "\r"
}
set time [ curtime ]
set filetime [ backuptimeproc ]
# xterm title change
send_user "\033]2;$name $filetime\007"
set fp [open "/etc/hosts" r]
while { [gets $fp line] >=0 } {
if {[regexp $name $line]} {
set line [split $line " "]
set ip [lindex $line 0]
}
}
close $fp
send_user "\n\033\[1;33m$name IP is $ip\nCtrl+a h - for help\033\[m\n"
log_file "/home/case/store/work/_archives_worklogs/$name-$filetime.log"
send_log "\n---------- log start at $time ----------\n"
interact {
# so that we don't get logout when idle
# timeout 180 { send " \b"}
\004 {
send_user "\nbye, bye\n"
set pid [exp_pid]
system "kill $pid"
}
\001l {
send -s "\r"
expect "login*" {
send -s "$user\r"
exp_continue
} "assword:*" {
send -s "$pass\r"
exp_continue
} "*-> " {
send -s "\r"
}
}
\001h {
help
}
\001? {
help
}
\001i {
send_user "\nThe new mgt IP: "
stty cooked echo
expect_user -re "(.*)\n"
stty raw -echo
global ip
set ip $expect_out(1,string)
send "\r"
}
\001c {
send -s "set clock [curtime]\r"
expect "$prompt"
send -s "set hostname lab-$name\r"
expect "$prompt"
send -s "set interface mgt ip $ip/23\r"
expect "$prompt"
send -s "set interface eth0 ip $ip/23\r"
expect "$prompt"
send -s "set interface eth0/0 ip $ip/23\r"
expect "$prompt"
send -s "set interface eth0 manage\r"
expect "$prompt"
send -s "set interface eth0/0 manage\r"
expect "$prompt"
foreach cmd { "set console page 0"
"set console timeout 0"
"set dbuf size 4096"
"set admin name netscreen"
"set admin password netscreen"
"set admin auth server \"Local\""} {
send -s "$cmd\r"
expect "$prompt"
}
foreach alias { "ss \"save software from tftp 172.30.73.133\""
"sc \"save config from tftp 172.30.73.133\""
"sh \"get\""
"wr \"save\""
"reload \"reset no-prompt\""
"q \"exit\""} {
send -s "set alias $alias\r"
expect "$prompt"
}
}
# \0010 {
# set timeout 3600
# match_max 10240000
# send -s "get tech-support\r"
# expect "$prompt"
# send -s "get event\r"
# expect "$prompt"
# send -s "get log sys\r"
# expect "$prompt"
# send -s "get nsrp\r"
# expect "$prompt"
# send -s "get nsrp monitor\r"
# expect "$prompt"
# send -s "get interface\r"
#
# for { set x 1 } { $x<=4 } { incr x } {
#
# expect "$prompt"
# send -s "get nsrp\r"
# expect "$prompt"
# send -s "get nsrp counter\r"
# expect "$prompt"
# send -s "get nsrp coun packet\r"
# expect "$prompt"
# send -s "get interface\r"
# }
#
#
# expect "$prompt"
# send -s "get db stream\r"
# expect "$prompt"
# sleep 5
# match_max -d
# set timeout 30
# }
\001t {
set timeout 3600
set prompt "*-> "
for { set x 1 } { $x<=130 } { incr x } {
send -s "set route 10.0.$x.0/24 interface eth0/0 gateway 172.30.72.1\r"
expect "$prompt"
sleep 1
}
send_user "\ndone\n"
send -s "\r"
set timeout 30
}
\001g {
set timeout 3600
set prompt "*-> "
for { set x 1 } { $x<=2 } { incr x } {
send -s "get clock\r"
expect "$prompt"
sleep 10
}
send_user "\ndone\n"
send -s "\r"
set timeout 30
}
\177 {
send "\010"
}
"\033\[3~" {
send "\177"
}
}
set time [ curtime ]
send_log "\n---------- log close at $time ----------\n"
log_file
exec /bin/bzip2 /home/case/store/work/_archives_worklogs/$name-$filetime.log
exit
| 18.693548 | 76 | 0.575712 |
ff1cf192ec4bc3ab0895f9be72991bd654efb4e3 | 1,204 | exs | Elixir | apps/ut_monitor_fw/mix.exs | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | 1 | 2017-02-10T17:41:02.000Z | 2017-02-10T17:41:02.000Z | apps/ut_monitor_fw/mix.exs | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | null | null | null | apps/ut_monitor_fw/mix.exs | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | null | null | null | defmodule UtMonitorFw.Mixfile do
use Mix.Project
@target "linkit"
def project do
[app: :ut_monitor_fw,
version: "0.0.1",
target: @target,
archives: [nerves_bootstrap: "~> 0.2.1"],
deps_path: "deps/#{@target}",
build_path: "_build/#{@target}",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps() ++ system(@target)]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {UtMonitorFw, []},
applications: [:logger, :nerves_uart, :nerves_interim_wifi, :nerves_ntp, :ut_monitor_lib]]
end
def deps do
[
{:nerves, "~> 0.4.0"},
{:nerves_interim_wifi, "~> 0.1"},
{:nerves_ntp, "~> 0.1"},
{:ut_monitor_lib, in_umbrella: true},
{:nerves_uart, "~> 0.1.1"},
{:credo, "~> 0.4", only: [:dev, :test]},
{:apex, "~> 1.0.0"}
]
end
def system(target) do
[{:"nerves_system_#{target}", "~> 0.10.0"}]
end
def aliases do
["deps.precompile": ["nerves.precompile", "deps.precompile"],
"deps.loadpaths": ["deps.loadpaths", "nerves.loadpaths"]]
end
end
| 24.571429 | 95 | 0.584718 |
ff1cf9537ece34339d948d630c3606eeec36ede2 | 42,305 | exs | Elixir | test/browser_test.exs | letitcrash/elixir-browser | 387455d9623f24a6cab05efb39551753a414aceb | [
"MIT"
] | 46 | 2015-10-31T18:49:39.000Z | 2018-09-14T04:32:35.000Z | test/browser_test.exs | letitcrash/elixir-browser | 387455d9623f24a6cab05efb39551753a414aceb | [
"MIT"
] | 17 | 2016-04-27T00:32:49.000Z | 2018-09-20T23:33:43.000Z | test/browser_test.exs | letitcrash/elixir-browser | 387455d9623f24a6cab05efb39551753a414aceb | [
"MIT"
] | 16 | 2016-04-27T00:21:02.000Z | 2018-09-10T04:32:23.000Z | defmodule BrowserTest do
use ExUnit.Case
use Plug.Test
import Plug.Conn
alias Browser.Ua
test "returns empty string when Conn has no user-agent header" do
assert conn(:get, "/") |> Ua.to_ua == ""
end
test "retrieves first UA when Conn has user-agent header" do
conn = conn(:get, "/")
|> put_req_header("user-agent", "Mozilla 5.0")
|> put_req_header("user-agent", "Opera")
assert conn |> Ua.to_ua == "Opera"
end
test "detects android" do
ua = Fixtures.ua["ANDROID"]
assert Browser.name(ua) == "Android"
assert Browser.full_browser_name(ua) == "Android 3.1.2"
assert Browser.full_display(ua) == "Android 3.1.2 on Android 1.5"
assert Browser.android?(ua)
refute Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.mobile?(ua)
refute Browser.tablet?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "3.1.2"
assert Browser.version(ua) == "3"
end
test "detects surface tablet" do
ua = Fixtures.ua["SURFACE"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 10.0"
assert Browser.full_display(ua) == "Internet Explorer 10.0 on Windows RT"
assert Browser.surface?(ua)
assert Browser.ie?(ua)
refute Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "10.0"
assert Browser.version(ua) == "10"
end
test "detects quicktime" do
ua = Fixtures.ua["QUICKTIME"]
assert Browser.name(ua) == "QuickTime"
assert Browser.full_browser_name(ua) == "QuickTime 7.6.8"
assert Browser.full_display(ua) == "QuickTime 7.6.8 on Windows XP"
assert Browser.quicktime?(ua)
assert Browser.full_version(ua) == "7.6.8"
assert Browser.version(ua) == "7"
end
test "detects core media" do
ua = Fixtures.ua["COREMEDIA"]
assert Browser.name(ua) == "Apple CoreMedia"
assert Browser.full_browser_name(ua) == "Apple CoreMedia 1.0.0.10F569"
assert Browser.full_display(ua) == "Apple CoreMedia 1.0.0.10F569 on MacOS"
assert Browser.core_media?(ua)
assert Browser.full_version(ua) == "1.0.0.10F569"
assert Browser.version(ua) == "1"
end
test "detects phantom.js" do
ua = Fixtures.ua["PHANTOM_JS"]
assert Browser.name(ua) == "PhantomJS"
assert Browser.full_browser_name(ua) == "PhantomJS 1.9.0"
assert Browser.full_display(ua) == "PhantomJS 1.9.0 on MacOS"
assert Browser.phantom_js?(ua)
refute Browser.tablet?(ua)
refute Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "1.9.0"
assert Browser.version(ua) == "1"
end
test "detects other mobiles" do
ua = "Symbian OS"
assert Browser.mobile?(ua)
refute Browser.tablet?(ua)
ua = "MIDP-2.0"
assert Browser.mobile?(ua)
refute Browser.tablet?(ua)
end
test "detects windows mobile" do
ua = Fixtures.ua["WINDOWS_MOBILE"]
assert Browser.mobile?(ua)
assert Browser.windows?(ua)
assert Browser.windows_mobile?(ua)
refute Browser.windows_phone?(ua)
refute Browser.tablet?(ua)
end
test "detects unknown id" do
ua = "Unknown"
assert Browser.id(ua) == :other
end
test "detects unknown name" do
ua = "Unknown"
assert Browser.name(ua) == "Other"
assert Browser.full_browser_name(ua) == "Other 0.0"
assert Browser.full_display(ua) == "Other 0.0 on Other"
end
test "detects ios platform" do
iphone = Fixtures.ua["IPHONE"]
ipad = Fixtures.ua["IPAD"]
assert Browser.platform(iphone) == :ios
assert Browser.platform(ipad) == :ios
end
test "detects android platform" do
android_1 = Fixtures.ua["ANDROID"]
android_2 = Fixtures.ua["ANDROID_WITH_SAFARI"]
assert Browser.platform(android_1) == :android
assert Browser.platform(android_2) == :android
end
test "detects mac platform" do
ua = "Mac OS X"
assert Browser.platform(ua) == :mac
assert Browser.mac?(ua)
end
test "detects mac version with underscore notation" do
ua = "Intel Mac OS X 10_13_1; en-us"
assert Browser.mac_version(ua) == "10.13.1"
end
test "detects mac version with dot notation" do
ua = "Intel Mac OS X 10.13.1; en-us"
assert Browser.mac_version(ua) == "10.13.1"
end
test "mac version name" do
ua_10_0 = "Intel Mac OS X 10_0_1; en-us"
ua_10_1 = "Intel Mac OS X 10_1_1; en-us"
ua_10_2 = "Intel Mac OS X 10_2_1; en-us"
ua_10_3 = "Intel Mac OS X 10_3_1; en-us"
ua_10_4 = "Intel Mac OS X 10_4_1; en-us"
ua_10_5 = "Intel Mac OS X 10_5_1; en-us"
ua_10_6 = "Intel Mac OS X 10_6_1; en-us"
ua_10_7 = "Intel Mac OS X 10_7_1; en-us"
ua_10_8 = "Intel Mac OS X 10_8_1; en-us"
ua_10_9 = "Intel Mac OS X 10_9_1; en-us"
ua_10_10 = "Intel Mac OS X 10_10_1; en-us"
ua_10_11 = "Intel Mac OS X 10_11_1; en-us"
ua_10_12 = "Intel Mac OS X 10_12_1; en-us"
ua_10_13 = "Intel Mac OS X 10_13_1; en-us"
assert Browser.mac_version_name(ua_10_13) == "High Sierra"
assert Browser.mac_version_name(ua_10_12) == "Sierra"
assert Browser.mac_version_name(ua_10_11) == "El Capitan"
assert Browser.mac_version_name(ua_10_10) == "Yosemite"
assert Browser.mac_version_name(ua_10_9) == "Mavericks"
assert Browser.mac_version_name(ua_10_8) == "Mountain Lion"
assert Browser.mac_version_name(ua_10_7) == "Lion"
assert Browser.mac_version_name(ua_10_6) == "Snow Leopard"
assert Browser.mac_version_name(ua_10_5) == "Leopard"
assert Browser.mac_version_name(ua_10_4) == "Tiger"
assert Browser.mac_version_name(ua_10_3) == "Panther"
assert Browser.mac_version_name(ua_10_2) == "Jaguar"
assert Browser.mac_version_name(ua_10_1) == "Puma"
assert Browser.mac_version_name(ua_10_0) == "Cheetah"
assert Browser.mac_version_name("Mac OS X") == ""
end
test "detects linux platform" do
ua = "Linux"
assert Browser.platform(ua) == :linux
assert Browser.linux?(ua)
end
test "detects unknown platform" do
ua = "Unknown"
assert Browser.platform(ua) == :other
end
test "detects mobile device type" do
ua = Fixtures.ua["IPHONE"]
assert Browser.device_type(ua) == :mobile
end
test "detects tablet device type" do
ua = Fixtures.ua["IPAD"]
assert Browser.device_type(ua) == :tablet
end
test "detects console device type" do
ua = Fixtures.ua["XBOXONE"]
assert Browser.device_type(ua) == :console
end
test "detects desktop device type" do
ua = Fixtures.ua["CHROME"]
assert Browser.device_type(ua) == :desktop
end
test "detects unknown device type" do
ua = "Unknown"
assert Browser.device_type(ua) == :unknown
end
test "detects xoom" do
ua = Fixtures.ua["XOOM"]
assert Browser.android?(ua)
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
end
test "detects nexus tablet" do
ua = Fixtures.ua["NEXUS_TABLET"]
assert Browser.android?(ua)
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
end
test "detects nook" do
ua = Fixtures.ua["NOOK"]
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
end
test "detects samsung" do
ua = Fixtures.ua["SAMSUNG"]
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
end
test "detects tv" do
ua = Fixtures.ua["SMART_TV"]
assert Browser.tv?(ua)
end
test "knows a supported browser" do
ua = "Chrome"
assert Browser.known?(ua)
end
test "does not know an unsupported browser" do
ua = "Fancy new browser"
refute Browser.known?(ua)
end
test "detects adobe air" do
ua = Fixtures.ua["ADOBE_AIR"]
assert Browser.adobe_air?(ua)
assert Browser.webkit?(ua)
assert Browser.version(ua) == "13"
assert Browser.full_version(ua) == "13.0"
assert Browser.name(ua) == "Other"
assert Browser.full_browser_name(ua) == "Other 13.0"
assert Browser.full_display(ua) == "Other 13.0 on MacOS"
end
# android
test "detect android cupcake (1.5)" do
ua = Fixtures.ua["ANDROID_CUPCAKE"]
assert Browser.android?(ua)
assert Browser.android?(ua, 1.5)
end
test "detect android donut (1.6)" do
ua = Fixtures.ua["ANDROID_DONUT"]
assert Browser.android?(ua)
assert Browser.android?(ua, 1.6)
end
test "detect android eclair (2.1)" do
ua = Fixtures.ua["ANDROID_ECLAIR_21"]
assert Browser.android?(ua)
assert Browser.android?(ua, 2.1)
end
test "detect android froyo (2.2)" do
ua = Fixtures.ua["ANDROID_FROYO"]
assert Browser.android?(ua)
assert Browser.android?(ua, 2.2)
end
test "detect android gingerbread (2.3)" do
ua = Fixtures.ua["ANDROID_GINGERBREAD"]
assert Browser.android?(ua)
assert Browser.android?(ua, 2.3)
end
test "detect android honeycomb (3.0)" do
ua = Fixtures.ua["ANDROID_HONEYCOMB_30"]
assert Browser.android?(ua)
assert Browser.android?(ua, 3.0)
end
test "detect android ice cream sandwich (4.0)" do
ua = Fixtures.ua["ANDROID_ICECREAM"]
assert Browser.android?(ua)
assert Browser.android?(ua, 4.0)
end
test "detect android jellybean (4.1)" do
ua = Fixtures.ua["ANDROID_JELLYBEAN_41"]
assert Browser.android?(ua)
assert Browser.android?(ua, 4.1)
end
test "detect android jellybean (4.2)" do
ua = Fixtures.ua["ANDROID_JELLYBEAN_42"]
assert Browser.android?(ua)
assert Browser.android?(ua, 4.2)
end
test "detect android jellybean (4.3)" do
ua = Fixtures.ua["ANDROID_JELLYBEAN_43"]
assert Browser.android?(ua)
assert Browser.android?(ua, 4.3)
end
test "detect android kitkat (4.4)" do
ua = Fixtures.ua["ANDROID_KITKAT"]
assert Browser.android?(ua)
assert Browser.android?(ua, 4.4)
end
test "detect android lollipop (5.0)" do
ua = Fixtures.ua["ANDROID_LOLLIPOP_50"]
assert Browser.android?(ua)
assert Browser.android?(ua, 5.0)
end
test "detect android lollipop (5.1)" do
ua = Fixtures.ua["ANDROID_LOLLIPOP_51"]
assert Browser.android?(ua)
assert Browser.android?(ua, 5.1)
end
test "detect android tv" do
ua = Fixtures.ua["ANDROID_TV"]
assert Browser.android?(ua)
assert Browser.tv?(ua)
end
test "detect nexus player" do
ua = Fixtures.ua["ANDROID_NEXUS_PLAYER"]
assert Browser.android?(ua)
assert Browser.tv?(ua)
end
# blackberry
test "detects blackberry" do
ua = Fixtures.ua["BLACKBERRY"]
assert Browser.name(ua), "BlackBerry"
assert Browser.full_browser_name(ua) == "BlackBerry 4.1.0"
assert Browser.full_display(ua) == "BlackBerry 4.1.0 on BlackBerry"
assert Browser.blackberry?(ua)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "4.1.0"
assert Browser.version(ua) == "4"
end
test "detects blackberry4" do
ua = Fixtures.ua["BLACKBERRY4"]
assert Browser.name(ua), "BlackBerry"
assert Browser.full_browser_name(ua) == "BlackBerry 4.2.1"
assert Browser.full_display(ua) == "BlackBerry 4.2.1 on BlackBerry 4"
assert Browser.blackberry_version(ua) == "4"
assert Browser.blackberry?(ua, 4)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "4.2.1"
assert Browser.version(ua) == "4"
end
test "detects blackberry5" do
ua = Fixtures.ua["BLACKBERRY5"]
assert Browser.name(ua), "BlackBerry"
assert Browser.full_browser_name(ua) == "BlackBerry 5.0.0.93"
assert Browser.full_display(ua) == "BlackBerry 5.0.0.93 on BlackBerry 5"
assert Browser.blackberry?(ua, 5)
assert Browser.blackberry_version(ua) == "5"
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "5.0.0.93"
assert Browser.version(ua) == "5"
end
test "detects blackberry6" do
ua = Fixtures.ua["BLACKBERRY6"]
assert Browser.name(ua) =="Safari"
assert Browser.full_browser_name(ua) == "Safari 534.11"
assert Browser.full_display(ua) == "Safari 534.11 on BlackBerry 6"
assert Browser.blackberry?(ua, 6)
assert Browser.blackberry_version(ua) == "6"
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "534.11"
assert Browser.version(ua) == "534"
end
test "detects blackberry7" do
ua = Fixtures.ua["BLACKBERRY7"]
assert Browser.name(ua) =="Safari"
assert Browser.full_browser_name(ua) == "Safari 534.11"
assert Browser.full_display(ua) == "Safari 534.11 on BlackBerry 7"
assert Browser.blackberry?(ua, 7)
assert Browser.blackberry_version(ua) == "7"
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "534.11"
assert Browser.version(ua) == "534"
end
test "detects blackberry10" do
ua = Fixtures.ua["BLACKBERRY10"]
assert Browser.name(ua) =="Safari"
assert Browser.full_browser_name(ua) == "Safari 10.0.9.1675"
assert Browser.full_display(ua) == "Safari 10.0.9.1675 on BlackBerry 10"
assert Browser.blackberry_version(ua) =="10"
assert Browser.blackberry?(ua, 10)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "10.0.9.1675"
assert Browser.version(ua) == "10"
end
test "detects blackberry playbook tablet" do
ua = Fixtures.ua["PLAYBOOK"]
refute Browser.android?(ua)
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
assert Browser.full_version(ua) == "7.2.1.0"
assert Browser.version(ua) == "7"
end
# bots
test "detects bots" do
refute Browser.bot?("")
refute Browser.bot?(%Plug.Conn{})
for {_key, ua} <- Fixtures.bot_ua do
assert Browser.bot?(ua), "#{ua} should be a bot"
conn =
%Plug.Conn{}
|> Plug.Conn.put_req_header("user-agent", ua)
assert Browser.bot?(conn), "#{ua} should be a bot"
end
ua = Fixtures.ua["CHROME"]
refute Browser.bot?(ua)
end
test "detects bots based on keywords" do
test_cases = ~w(
content-fetcher
content-crawler
some-search-engine
monitoring-service
content-spider
some-bot
)
for ua <- test_cases do
assert Browser.bot?(ua), "#{ua} should be a bot"
conn =
%Plug.Conn{}
|> Plug.Conn.put_req_header("user-agent", ua)
assert Browser.bot?(conn), "#{ua} should be a bot"
end
end
test "respects bot_exceptions" do
test_cases = [
"ruby build",
"pinterest/android",
"pinterest/ios",
"yandexsearchbrowser"
]
for ua <- test_cases do
refute Browser.bot?(ua), "#{ua} should not be a bot"
end
end
test "detects Google Page Speed as a bot" do
ua = Fixtures.ua["GOOGLE_PAGE_SPEED_INSIGHTS"]
assert Browser.bot?(ua)
end
test "doesn't consider empty UA as bot" do
ua = ""
refute Browser.bot?(ua)
end
test "allows setting empty string as bots" do
ua = ""
assert Browser.bot?(ua, detect_empty_ua: true)
end
test "doesn't detect mozilla as a bot when considering empty UA" do
ua = "Mozilla"
refute Browser.bot?(ua, detect_empty_ua: true)
end
test "returns bot name" do
ua = Fixtures.ua["GOOGLE_BOT"]
assert Browser.bot_name(ua) == "googlebot"
ua = Fixtures.ua["FACEBOOK_BOT"]
assert Browser.bot_name(ua) == "facebookexternalhit"
end
test "returns bot name (empty string ua detection enabled)" do
ua = ""
assert Browser.bot_name(ua, detect_empty_ua: true) == "Generic Bot"
end
test "returns nil for non-bots" do
ua = Fixtures.ua["CHROME"]
assert Browser.bot_name(ua) == nil
end
test "detects as search engines" do
refute Browser.search_engine?("")
refute Browser.search_engine?(%Plug.Conn{})
Enum.each ~w[
ASK
BAIDU
BINGBOT
DUCKDUCKGO
GOOGLE_BOT
YAHOO_SLURP
], fn key ->
ua = Fixtures.ua[key]
assert Browser.search_engine?(ua), "#{Fixtures.ua[key]} should be a search engine"
conn =
%Plug.Conn{}
|> Plug.Conn.put_req_header("user-agent", ua)
assert Browser.search_engine?(conn), "#{Fixtures.ua[key]} should be a search engine"
end
end
test "detects Google Structured Data Testing Tool as a bot" do
ua = Fixtures.ua["GOOGLE_STRUCTURED_DATA_TESTING_TOOL"]
assert Browser.bot?(ua), "Google Structured Data Testing Tool should be a bot"
end
test "detects Daumoa" do
ua = Fixtures.ua["DAUMOA"]
assert :ie == Browser.id(ua)
assert "Internet Explorer" == Browser.name(ua)
assert "0.0" == Browser.msie_full_version(ua)
assert "0" == Browser.msie_version(ua)
assert "0.0" == Browser.full_version(ua)
assert "0" == Browser.version(ua)
assert Browser.ie?(ua)
assert Browser.bot?(ua)
refute Browser.windows10?(ua)
refute Browser.windows_phone?(ua)
refute Browser.edge?(ua)
refute Browser.modern?(ua)
refute Browser.mobile?(ua)
refute Browser.webkit?(ua)
refute Browser.chrome?(ua)
refute Browser.safari?(ua)
end
# chrome
test "detects chrome" do
ua = Fixtures.ua["CHROME"]
assert Browser.name(ua) =="Chrome"
assert Browser.full_browser_name(ua) == "Chrome 5.0.375.99"
assert Browser.full_display(ua) == "Chrome 5.0.375.99 on MacOS 10.6.4 Snow Leopard"
assert Browser.chrome?(ua)
refute Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "5.0.375.99"
assert Browser.version(ua) == "5"
end
test "detects mobile chrome" do
ua = Fixtures.ua["MOBILE_CHROME"]
assert Browser.name(ua) =="Chrome"
assert Browser.full_browser_name(ua) == "Chrome 19.0.1084.60"
assert Browser.full_display(ua) == "Chrome 19.0.1084.60 on iOS 5"
assert Browser.chrome?(ua)
refute Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "19.0.1084.60"
assert Browser.version(ua) == "19"
end
test "detects samsung chrome" do
ua = Fixtures.ua["SAMSUNG_CHROME"]
assert Browser.name(ua) =="Chrome"
assert Browser.full_browser_name(ua) == "Chrome 28.0.1500.94"
assert Browser.full_display(ua) == "Chrome 28.0.1500.94 on Android 4.4.2"
assert Browser.chrome?(ua)
assert Browser.android?(ua)
refute Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "28.0.1500.94"
assert Browser.version(ua) == "28"
end
test "detects chrome os" do
ua = Fixtures.ua["CHROME_OS"]
assert Browser.chrome_os?(ua)
end
test "detects yandex browser" do
ua = Fixtures.ua["YANDEX_BROWSER"]
assert Browser.yandex?(ua)
assert Browser.chrome?(ua)
refute Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.full_version(ua) == "41.0.2272.118"
assert Browser.version(ua) == "41"
end
# console
test "detects nintendo wii" do
ua = Fixtures.ua["NINTENDO_WII"]
assert Browser.console?(ua)
assert Browser.nintendo?(ua)
end
test "detects nintendo wii u" do
ua = Fixtures.ua["NINTENDO_WIIU"]
assert Browser.console?(ua)
assert Browser.nintendo?(ua)
end
test "detects playstation 3" do
ua = Fixtures.ua["PLAYSTATION3"]
assert Browser.console?(ua)
assert Browser.playstation?(ua)
refute Browser.playstation4?(ua)
end
test "detects playstation 4" do
ua = Fixtures.ua["PLAYSTATION4"]
assert Browser.console?(ua)
assert Browser.playstation?(ua)
assert Browser.playstation4?(ua)
end
test "detects xbox 360" do
ua = Fixtures.ua["XBOX360"]
assert Browser.console?(ua)
assert Browser.xbox?(ua)
refute Browser.xbox_one?(ua)
end
test "detects xbox one" do
ua = Fixtures.ua["XBOXONE"]
assert Browser.console?(ua)
assert Browser.xbox?(ua)
assert Browser.xbox_one?(ua)
end
test "detects psp" do
ua = Fixtures.ua["PSP"]
assert Browser.name(ua) == "PlayStation Portable"
assert Browser.full_browser_name(ua) == "PlayStation Portable 0.0"
assert Browser.full_display(ua) == "PlayStation Portable 0.0 on Other"
assert Browser.psp?(ua)
refute Browser.psp_vita?(ua)
assert Browser.mobile?(ua)
end
test "detects psp vita" do
ua = Fixtures.ua["PSP_VITA"]
assert Browser.name(ua) == "PlayStation Portable"
assert Browser.full_display(ua) == "PlayStation Portable 0.0 on Other"
assert Browser.psp?(ua)
assert Browser.psp_vita?(ua)
assert Browser.mobile?(ua)
end
# firefox
test "detects firefox" do
ua = Fixtures.ua["FIREFOX"]
assert Browser.name(ua) == "Firefox"
assert Browser.full_browser_name(ua) == "Firefox 3.8"
assert Browser.full_display(ua) == "Firefox 3.8 on Linux"
assert Browser.firefox?(ua)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "3.8"
assert Browser.version(ua) == "3"
end
test "detects modern firefox" do
ua = Fixtures.ua["FIREFOX_MODERN"]
assert Browser.id(ua) == :firefox
assert Browser.name(ua) == "Firefox"
assert Browser.full_browser_name(ua) == "Firefox 17.0"
assert Browser.full_display(ua) == "Firefox 17.0 on Linux"
assert Browser.firefox?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "17.0"
assert Browser.version(ua) == "17"
end
test "detects firefox android tablet" do
ua = Fixtures.ua["FIREFOX_TABLET"]
assert Browser.id(ua) == :firefox
assert Browser.name(ua) == "Firefox"
assert Browser.full_browser_name(ua) == "Firefox 14.0"
assert Browser.full_display(ua) == "Firefox 14.0 on Android"
assert Browser.firefox?(ua)
assert Browser.modern?(ua)
assert Browser.tablet?(ua)
assert Browser.android?(ua)
assert Browser.full_version(ua) == "14.0"
assert Browser.version(ua) == "14"
end
# IE
test "detects ie6" do
ua = Fixtures.ua["IE6"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 6.0"
assert Browser.full_display(ua) == "Internet Explorer 6.0 on Windows XP"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 6)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "6.0"
assert Browser.version(ua) == "6"
end
test "detects ie7" do
ua = Fixtures.ua["IE7"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 7.0"
assert Browser.full_display(ua) == "Internet Explorer 7.0 on Windows Vista"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 7)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "7.0"
assert Browser.version(ua) == "7"
end
test "detects ie8" do
ua = Fixtures.ua["IE8"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 8.0"
assert Browser.full_display(ua) == "Internet Explorer 8.0 on Windows 8"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 8)
refute Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "8.0"
assert Browser.version(ua) == "8"
end
test "detects ie8 in compatibility view" do
ua = Fixtures.ua["IE8_COMPAT"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 8.0"
assert Browser.full_display(ua) == "Internet Explorer 8.0 on Windows Vista"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 8)
refute Browser.modern?(ua)
assert Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "8.0"
assert Browser.version(ua) == "8"
assert Browser.msie_full_version(ua) == "7.0"
assert Browser.msie_version(ua) == "7"
end
test "detects ie9" do
ua = Fixtures.ua["IE9"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 9.0"
assert Browser.full_display(ua) == "Internet Explorer 9.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 9)
assert Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "9.0"
assert Browser.version(ua) == "9"
end
test "detects ie9 in compatibility view" do
ua = Fixtures.ua["IE9_COMPAT"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 9.0"
assert Browser.full_display(ua) == "Internet Explorer 9.0 on Windows Vista"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 9)
refute Browser.modern?(ua)
assert Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "9.0"
assert Browser.version(ua) == "9"
assert Browser.msie_full_version(ua) == "7.0"
assert Browser.msie_version(ua) == "7"
end
# IE 10/11 fixtures, normal mode and compatibility view. Note that a
# compatibility-view IE is never considered modern?/1.
test "detects ie10" do
ua = Fixtures.ua["IE10"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 10.0"
assert Browser.full_display(ua) == "Internet Explorer 10.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 10)
assert Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "10.0"
assert Browser.version(ua) == "10"
end
test "detects ie10 in compatibility view" do
ua = Fixtures.ua["IE10_COMPAT"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 10.0"
assert Browser.full_display(ua) == "Internet Explorer 10.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 10)
refute Browser.modern?(ua)
assert Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "10.0"
assert Browser.version(ua) == "10"
assert Browser.msie_full_version(ua) == "7.0"
assert Browser.msie_version(ua) == "7"
end
test "detects ie11" do
ua = Fixtures.ua["IE11"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 11.0"
assert Browser.full_display(ua) == "Internet Explorer 11.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 11)
assert Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "11.0"
assert Browser.version(ua) == "11"
end
test "detects ie11 in compatibility view" do
ua = Fixtures.ua["IE11_COMPAT"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 11.0"
assert Browser.full_display(ua) == "Internet Explorer 11.0 on Windows 8.1"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 11)
refute Browser.modern?(ua)
assert Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "11.0"
assert Browser.version(ua) == "11"
assert Browser.msie_full_version(ua) == "7.0"
assert Browser.msie_version(ua) == "7"
end
# Device-specific IE fixtures: a Lumia 800 phone (mobile, not tablet) and a
# touch-screen desktop running IE11 (touchscreen desktop, not Windows RT).
test "detects Lumia 800" do
ua = Fixtures.ua["LUMIA800"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 9.0"
assert Browser.full_display(ua) == "Internet Explorer 9.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 9)
assert Browser.full_version(ua) == "9.0"
assert Browser.version(ua) == "9"
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
end
test "detects ie11 touch desktop pc" do
ua = Fixtures.ua["IE11_TOUCH_SCREEN"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 11.0"
assert Browser.full_display(ua) == "Internet Explorer 11.0 on Windows 8.1"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 11)
assert Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
refute Browser.windows_rt?(ua)
assert Browser.windows_touchscreen_desktop?(ua)
assert Browser.windows8?(ua)
assert Browser.full_version(ua) == "11.0"
assert Browser.version(ua) == "11"
end
# Microsoft Edge fixtures. Edge must not be misdetected as Chrome/Safari
# even though its UA contains WebKit-style tokens.
test "detects Microsoft Edge" do
ua = Fixtures.ua["MS_EDGE"]
assert Browser.id(ua) == :edge
assert Browser.name(ua) == "Microsoft Edge"
assert Browser.full_browser_name(ua) == "Microsoft Edge 12.0"
assert Browser.full_display(ua) == "Microsoft Edge 12.0 on Windows 10"
assert Browser.full_version(ua) == "12.0"
assert Browser.version(ua) == "12"
assert Browser.windows10?(ua)
assert Browser.edge?(ua)
assert Browser.modern?(ua)
refute Browser.webkit?(ua)
refute Browser.chrome?(ua)
refute Browser.safari?(ua)
refute Browser.mobile?(ua)
end
test "detects Microsoft Edge in compatibility view" do
ua = Fixtures.ua["MS_EDGE_COMPAT"]
assert Browser.id(ua) == :edge
assert Browser.name(ua) == "Microsoft Edge"
assert Browser.full_browser_name(ua) == "Microsoft Edge 12.0"
assert Browser.full_display(ua) == "Microsoft Edge 12.0 on Windows 8"
assert Browser.full_version(ua) == "12.0"
assert Browser.version(ua) == "12"
# Compatibility view pins the MSIE token at 7.0, same as for IE.
assert Browser.msie_full_version(ua) == "7.0"
assert Browser.msie_version(ua) == "7"
assert Browser.edge?(ua)
assert Browser.compatibility_view?(ua)
refute Browser.modern?(ua)
refute Browser.webkit?(ua)
refute Browser.chrome?(ua)
refute Browser.safari?(ua)
refute Browser.mobile?(ua)
end
test "detects Microsoft Edge Mobile" do
ua = Fixtures.ua["MS_EDGE_MOBILE"]
assert Browser.id(ua) == :edge
assert Browser.name(ua) == "Microsoft Edge"
assert Browser.full_browser_name(ua) == "Microsoft Edge 12.0"
# NOTE(review): the fixture UA advertises Android 4.2.1 in the display
# string, yet the platform is detected as Windows Phone — presumably the
# fixture mimics Edge Mobile's Android-compatibility tokens; verify.
assert Browser.full_display(ua) == "Microsoft Edge 12.0 on Android 4.2.1"
assert Browser.full_version(ua) == "12.0"
assert Browser.version(ua) == "12"
refute Browser.windows10?(ua)
assert Browser.windows_phone?(ua)
assert Browser.edge?(ua)
assert Browser.modern?(ua)
assert Browser.mobile?(ua)
refute Browser.webkit?(ua)
refute Browser.chrome?(ua)
refute Browser.safari?(ua)
end
# IE fixtures whose UA lacks the Trident token; detection must fall back to
# the MSIE version alone.
test "detects ie8 without Trident" do
ua = Fixtures.ua["IE8_WITHOUT_TRIDENT"]
assert Browser.id(ua) == :ie
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 8.0"
assert Browser.full_display(ua) == "Internet Explorer 8.0 on Windows Vista"
assert Browser.msie_full_version(ua) == "8.0"
assert Browser.msie_version(ua) == "8"
assert Browser.full_version(ua) == "8.0"
assert Browser.version(ua) == "8"
refute Browser.windows10?(ua)
refute Browser.windows_phone?(ua)
refute Browser.edge?(ua)
refute Browser.modern_edge?(ua)
refute Browser.modern?(ua)
refute Browser.modern_ie_version?(ua)
refute Browser.mobile?(ua)
refute Browser.webkit?(ua)
refute Browser.chrome?(ua)
refute Browser.safari?(ua)
end
test "detects ie9 without Trident" do
ua = Fixtures.ua["IE9_WITHOUT_TRIDENT"]
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 9.0"
assert Browser.full_display(ua) == "Internet Explorer 9.0 on Windows 7"
assert Browser.ie?(ua)
assert Browser.ie?(ua, 9)
assert Browser.modern?(ua)
refute Browser.compatibility_view?(ua)
assert Browser.full_version(ua) == "9.0"
assert Browser.version(ua) == "9"
refute Browser.modern_edge?(ua)
assert Browser.modern_ie_version?(ua)
end
# Windows Phone fixtures (7, 8, 8.1). Windows Phone is mobile but is
# distinct from windows_mobile?/1 and never a tablet.
test "detects windows phone" do
ua = Fixtures.ua["WINDOWS_PHONE"]
assert Browser.ie?(ua)
assert Browser.version(ua) == "7"
assert Browser.mobile?(ua)
assert Browser.windows_phone?(ua)
refute Browser.windows_mobile?(ua)
refute Browser.tablet?(ua)
end
test "detects windows phone 8" do
ua = Fixtures.ua["WINDOWS_PHONE8"]
assert Browser.ie?(ua)
assert Browser.version(ua) == "10"
assert Browser.mobile?(ua)
assert Browser.windows_phone?(ua)
refute Browser.windows_mobile?(ua)
refute Browser.tablet?(ua)
end
test "detects windows phone 8.1" do
ua = Fixtures.ua["WINDOWS_PHONE_81"]
assert Browser.ie?(ua)
assert Browser.name(ua) == "Internet Explorer"
assert Browser.full_browser_name(ua) == "Internet Explorer 11.0"
# NOTE(review): full_display reports "on iOS 7" for a Windows Phone 8.1
# UA — likely because the fixture mimics iPhone tokens; confirm intended.
assert Browser.full_display(ua) == "Internet Explorer 11.0 on iOS 7"
assert Browser.id(ua) == :ie
assert Browser.version(ua) == "11"
assert Browser.full_version(ua) == "11.0"
assert Browser.mobile?(ua)
assert Browser.windows_phone?(ua)
refute Browser.windows_mobile?(ua)
refute Browser.tablet?(ua)
end
test "detects windows mobile (windows phone 8)" do
ua = Fixtures.ua["WINDOWS_PHONE8"]
assert Browser.ie?(ua)
assert Browser.version(ua) == "10"
assert Browser.mobile?(ua)
assert Browser.windows_phone?(ua)
refute Browser.windows_mobile?(ua)
refute Browser.tablet?(ua)
end
# Windows architecture detection: native x64 vs WOW64 (32-bit process on
# 64-bit Windows); windows_x64_inclusive?/1 covers both.
test "detects windows x64" do
ua = Fixtures.ua["IE10_X64_WINX64"]
assert Browser.windows_x64?(ua)
refute Browser.windows_wow64?(ua)
assert Browser.windows_x64_inclusive?(ua)
end
test "detects windows wow64" do
ua = Fixtures.ua["WINDOWS_WOW64"]
refute Browser.windows_x64?(ua)
assert Browser.windows_wow64?(ua)
assert Browser.windows_x64_inclusive?(ua)
end
# Windows platform and per-release predicates. A bare "Windows" token is
# enough for platform detection.
test "detects windows platform" do
ua = "Windows"
assert Browser.platform(ua) == :windows
assert Browser.windows?(ua)
end
test "detects windows_xp" do
ua = Fixtures.ua["WINDOWS_XP"]
assert Browser.windows?(ua)
assert Browser.windows_xp?(ua)
end
test "detects windows_vista" do
ua = Fixtures.ua["WINDOWS_VISTA"]
assert Browser.windows?(ua)
assert Browser.windows_vista?(ua)
end
test "detects windows7" do
ua = Fixtures.ua["WINDOWS7"]
assert Browser.windows?(ua)
assert Browser.windows7?(ua)
end
test "detects windows8" do
ua = Fixtures.ua["WINDOWS8"]
assert Browser.windows?(ua)
assert Browser.windows8?(ua)
refute Browser.windows8_1?(ua)
end
test "detects windows8.1" do
ua = Fixtures.ua["WINDOWS81"]
assert Browser.windows?(ua)
# windows8?/1 is inclusive of 8.1 per these assertions.
assert Browser.windows8?(ua)
assert Browser.windows8_1?(ua)
end
test "don't detect as two ie different versions" do
ua = Fixtures.ua["IE8"]
assert Browser.ie?(ua, 8)
refute Browser.ie?(ua, 7)
end
# windows_version_name/1 maps a UA to a human-readable Windows release
# name; an unrecognized Windows UA yields "".
test "detects windows version" do
phone = Fixtures.ua["WINDOWS_PHONE"]
phone8 = Fixtures.ua["WINDOWS_PHONE8"]
phone81 = Fixtures.ua["WINDOWS_PHONE_81"]
none = "Windows"
xp = Fixtures.ua["WINDOWS_XP"]
vista = Fixtures.ua["WINDOWS_VISTA"]
win7 = Fixtures.ua["WINDOWS7"]
win8 = Fixtures.ua["WINDOWS8"]
win81 = Fixtures.ua["WINDOWS81"]
win10 = Fixtures.ua["MS_EDGE"]
assert Browser.windows_version_name(phone) == "Phone"
assert Browser.windows_version_name(phone8) == "Phone"
assert Browser.windows_version_name(phone81) == "Phone"
assert Browser.windows_version_name(none) == ""
assert Browser.windows_version_name(xp) == "XP"
assert Browser.windows_version_name(vista) == "Vista"
assert Browser.windows_version_name(win7) == "7"
assert Browser.windows_version_name(win8) == "8"
assert Browser.windows_version_name(win81) == "8.1"
assert Browser.windows_version_name(win10) == "10"
end
# ios -- iPhone/Safari fixtures. iOS devices count as Safari/WebKit but not
# as Mac.
test "detects iphone" do
ua = Fixtures.ua["IPHONE"]
assert Browser.name(ua) == "iPhone"
assert Browser.full_browser_name(ua) == "iPhone 3.0"
assert Browser.full_display(ua) == "iPhone 3.0 on iOS 3"
assert Browser.iphone?(ua)
assert Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.ios?(ua)
refute Browser.tablet?(ua)
refute Browser.mac?(ua)
assert Browser.full_version(ua) == "3.0"
assert Browser.version(ua) == "3"
end
test "detects safari" do
ua = Fixtures.ua["SAFARI"]
assert Browser.name(ua) == "Safari"
assert Browser.full_browser_name(ua) == "Safari 5.0.1"
assert Browser.full_display(ua) == "Safari 5.0.1 on MacOS 10.6.4 Snow Leopard"
assert Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.full_version(ua) == "5.0.1"
assert Browser.version(ua) == "5"
end
# Home-screen webapp mode drops the Safari token; it must still be Safari.
test "detects safari in webapp mode" do
ua = Fixtures.ua["SAFARI_IPAD_WEBAPP_MODE"]
assert Browser.safari?(ua)
ua = Fixtures.ua["SAFARI_IPHONE_WEBAPP_MODE"]
assert Browser.safari?(ua)
end
# iPod/iPad fixtures. Per these assertions the iPad is a tablet and NOT
# mobile?/1, while the iPod is mobile and not a tablet.
test "detects ipod" do
ua = Fixtures.ua["IPOD"]
assert Browser.name(ua) == "iPod Touch"
assert Browser.full_browser_name(ua) == "iPod Touch 3.0"
assert Browser.full_display(ua) == "iPod Touch 3.0 on iOS"
assert Browser.ipod?(ua)
assert Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.mobile?(ua)
assert Browser.modern?(ua)
assert Browser.ios?(ua)
refute Browser.tablet?(ua)
refute Browser.mac?(ua)
assert Browser.full_version(ua) == "3.0"
assert Browser.version(ua) == "3"
end
test "detects ipad" do
ua = Fixtures.ua["IPAD"]
assert Browser.name(ua) == "iPad"
assert Browser.full_browser_name(ua) == "iPad 4.0.4"
assert Browser.full_display(ua) == "iPad 4.0.4 on iOS 3"
assert Browser.ipad?(ua)
assert Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.ios?(ua)
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
refute Browser.mac?(ua)
assert Browser.full_version(ua) == "4.0.4"
assert Browser.version(ua) == "4"
end
# Google Search App (GSA) on iPad still counts as Safari/WebKit.
test "detects ipad gsa" do
ua = Fixtures.ua["IPAD_GSA"]
assert Browser.name(ua) == "iPad"
assert Browser.full_browser_name(ua) == "iPad 13.1.72140"
assert Browser.full_display(ua) == "iPad 13.1.72140 on iOS 9"
assert Browser.ipad?(ua)
assert Browser.safari?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.ios?(ua)
assert Browser.tablet?(ua)
refute Browser.mobile?(ua)
refute Browser.mac?(ua)
assert Browser.full_version(ua) == "13.1.72140"
assert Browser.version(ua) == "13"
end
# ios?/2 version checks for iOS 4 through 9; an iOS UA is never a Mac, and
# a given UA must match exactly one iOS major version.
test "detects ios4" do
ua = Fixtures.ua["IOS4"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 4)
refute Browser.mac?(ua)
end
test "detects ios5" do
ua = Fixtures.ua["IOS5"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 5)
refute Browser.mac?(ua)
end
test "detects ios6" do
ua = Fixtures.ua["IOS6"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 6)
refute Browser.mac?(ua)
end
test "detects ios7" do
ua = Fixtures.ua["IOS7"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 7)
refute Browser.mac?(ua)
end
test "detects ios8" do
ua = Fixtures.ua["IOS8"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 8)
refute Browser.mac?(ua)
end
test "detects ios9" do
ua = Fixtures.ua["IOS9"]
assert Browser.ios?(ua)
assert Browser.ios?(ua, 9)
refute Browser.mac?(ua)
end
test "don't detect as two ios different versions" do
ua = Fixtures.ua["IOS8"]
assert Browser.ios?(ua, 8)
refute Browser.ios?(ua, 7)
end
# kindle -- Kindle fixtures: e-ink ("monochrome"), Fire, and Fire HD. The
# Fire HD variants run the Silk browser; only the mobile UA is mobile?/1.
test "detects kindle monochrome" do
ua = Fixtures.ua["KINDLE"]
assert Browser.kindle?(ua)
assert Browser.webkit?(ua)
end
test "detects kindle fire" do
ua = Fixtures.ua["KINDLE_FIRE"]
assert Browser.kindle?(ua)
assert Browser.webkit?(ua)
end
test "detects kindle fire hd" do
ua = Fixtures.ua["KINDLE_FIRE_HD"]
assert Browser.silk?(ua)
assert Browser.kindle?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
refute Browser.mobile?(ua)
end
test "detects kindle fire hd mobile" do
ua = Fixtures.ua["KINDLE_FIRE_HD_MOBILE"]
assert Browser.silk?(ua)
assert Browser.kindle?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
assert Browser.mobile?(ua)
end
# opera -- Opera fixtures: legacy (Presto), Opera Next (Blink/WebKit-based,
# must not be mistaken for Chrome), Mini, Mobi and Android, plus UC Browser.
test "detects opera" do
ua = Fixtures.ua["OPERA"]
assert Browser.name(ua) == "Opera"
assert Browser.full_browser_name(ua) == "Opera 11.64"
assert Browser.full_display(ua) == "Opera 11.64 on MacOS 10.7.4 Lion"
assert Browser.opera?(ua)
refute Browser.modern?(ua)
assert Browser.full_version(ua) == "11.64"
assert Browser.version(ua) == "11"
end
test "detects opera next" do
ua = Fixtures.ua["OPERA_NEXT"]
assert Browser.name(ua) == "Opera"
assert Browser.full_browser_name(ua) == "Opera 15.0.1147.44"
assert Browser.full_display(ua) == "Opera 15.0.1147.44 on MacOS 10.8.4 Mountain Lion"
assert Browser.id(ua) == :opera
assert Browser.opera?(ua)
assert Browser.webkit?(ua)
assert Browser.modern?(ua)
refute Browser.chrome?(ua)
assert Browser.full_version(ua) == "15.0.1147.44"
assert Browser.version(ua) == "15"
end
test "detects opera mini" do
ua = Fixtures.ua["OPERA_MINI"]
assert Browser.opera_mini?(ua)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
end
test "detects opera mobi" do
ua = Fixtures.ua["OPERA_MOBI"]
assert Browser.opera?(ua)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
end
# bug https://github.com/tuvistavie/elixir-browser/issues/13
test "detects opera on android" do
ua = Fixtures.ua["OPERA_ANDROID"]
assert Browser.opera?(ua)
assert Browser.android?(ua)
end
test "detects UC Browser" do
ua = Fixtures.ua["UC_BROWSER"]
assert Browser.name(ua) == "UC Browser"
assert Browser.version(ua) == "8"
assert Browser.uc_browser?(ua)
refute Browser.tablet?(ua)
assert Browser.mobile?(ua)
end
# Unrecognized UAs fall back to the "Other" browser with a zero version.
test "fallbacks to other on unknown UA" do
ua = Fixtures.ua["INVALID"]
assert Browser.name(ua) == "Other"
assert Browser.full_browser_name(ua) == "Other 0.0"
assert Browser.full_display(ua) == "Other 0.0 on Other"
assert Browser.id(ua) == :other
assert Browser.full_version(ua) == "0.0"
assert Browser.version(ua) == "0"
end
end
| 29.236351 | 90 | 0.667652 |
ff1cfe8c937220ba6e46f241e2d730479ce686c8 | 44,424 | ex | Elixir | lib/absinthe/schema/notation.ex | myronmarston/absinthe | d94d1bb5d9ede3a8715f5a6ce6ce607126d4d756 | [
"MIT"
] | null | null | null | lib/absinthe/schema/notation.ex | myronmarston/absinthe | d94d1bb5d9ede3a8715f5a6ce6ce607126d4d756 | [
"MIT"
] | null | null | null | lib/absinthe/schema/notation.ex | myronmarston/absinthe | d94d1bb5d9ede3a8715f5a6ce6ce607126d4d756 | [
"MIT"
] | null | null | null | defmodule Absinthe.Schema.Notation do
alias Absinthe.Blueprint.Schema
alias Absinthe.Utils
@moduledoc """
Provides a set of macro's to use when creating a schema. Especially useful
when moving definitions out into a different module than the schema itself.
## Example
defmodule MyAppWeb.Schema.Types do
use Absinthe.Schema.Notation
object :item do
field :id, :id
field :name, :string
end
# ...
end
"""
Module.register_attribute(__MODULE__, :placement, accumulate: true)
# Prepares the caller module for schema notation:
#  * registers the accumulating attributes that collect blueprint nodes and
#    descriptions as the notation macros run,
#  * seeds the blueprint with an empty %Absinthe.Blueprint{} for the schema,
#  * initializes the scope stacks used by recordable!/placement validation.
# The returned quoted block also imports this module's macros (restrictable
# via `import_opts`) and the async/batch resolution helpers.
defmacro __using__(import_opts \\ [only: :macros]) do
Module.register_attribute(__CALLER__.module, :absinthe_blueprint, accumulate: true)
Module.register_attribute(__CALLER__.module, :absinthe_desc, accumulate: true)
put_attr(__CALLER__.module, %Absinthe.Blueprint{schema: __CALLER__.module})
Module.put_attribute(__CALLER__.module, :absinthe_scope_stack, [:schema])
Module.put_attribute(__CALLER__.module, :absinthe_scope_stack_stash, [])
quote do
import Absinthe.Resolution.Helpers,
only: [
async: 1,
async: 2,
batch: 3,
batch: 4
]
Module.register_attribute(__MODULE__, :__absinthe_type_import__, accumulate: true)
@desc nil
import unquote(__MODULE__), unquote(import_opts)
@before_compile unquote(__MODULE__)
end
end
### Macro API ###
@placement {:config, [under: [:field]]}
@doc """
Configure a subscription field.
## Examples
```elixir
config fn args, %{context: context} ->
if authorized?(context) do
{:ok, topic: args.client_id}
else
{:error, "unauthorized"}
end
end
```
Alternatively can provide a list of topics:
```elixir
config fn _, _ ->
{:ok, topic: ["topic_one", "topic_two", "topic_three"]}
end
```
Using `context_id` option to allow de-duplication of updates:
```elixir
config fn _, %{context: context} ->
if authorized?(context) do
{:ok, topic: "topic_one", context_id: "authorized"}
else
{:ok, topic: "topic_one", context_id: "not-authorized"}
end
end
```
See `Absinthe.Schema.subscription/1` for details
"""
defmacro config(config_fun) do
  # Placement check raises unless `config` appears under a field, then the
  # subscription config function is recorded on the current scope.
  env = recordable!(__CALLER__, :config, @placement[:config])
  record_config!(env, config_fun)
end
@placement {:trigger, [under: [:field]]}
@doc """
Set a trigger for a subscription field.
It accepts one or more mutation field names, and can be called more than once.
```
mutation do
field :gps_event, :gps_event
field :user_checkin, :user
end
subscription do
field :location_update, :user do
arg :user_id, non_null(:id)
config fn args, _ ->
{:ok, topic: args.user_id}
end
trigger :gps_event, topic: fn event ->
event.user_id
end
trigger :user_checkin, topic: fn user ->
[user.id, user.parent_id]
end
end
end
```
Trigger functions are only called once per event, so database calls within
them do not present a significant burden.
See the `subscription/2` macro docs for additional details
"""
defmacro trigger(mutations, attrs) do
  # `mutations` may be one identifier or a list; normalize with List.wrap/1.
  env = recordable!(__CALLER__, :trigger, @placement[:trigger])
  record_trigger!(env, List.wrap(mutations), attrs)
end
# OBJECT
@placement {:object, [toplevel: true]}
@doc """
Define an object type.
Adds an `Absinthe.Type.Object` to your schema.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
Basic definition:
```
object :car do
# ...
end
```
Providing a custom name:
```
object :car, name: "CarType" do
# ...
end
```
"""
@reserved_identifiers ~w(query mutation subscription)a
# Bodiless head declaring the default value for `attrs`.
defmacro object(identifier, attrs \\ [], block)
# `query`, `mutation` and `subscription` have dedicated macros; defining
# them via `object` is rejected with a descriptive error.
defmacro object(identifier, _attrs, _block) when identifier in @reserved_identifiers do
raise Absinthe.Schema.Notation.Error,
"Invalid schema notation: cannot create an `object` " <>
"with reserved identifier `#{identifier}`"
end
defmacro object(identifier, attrs, do: block) do
# An inline `:meta` attribute is rewritten into a `meta ...` call that is
# prepended to the do-block, so it is recorded exactly as if the user had
# written `meta ...` inside the object body.
{attrs, block} =
case Keyword.pop(attrs, :meta) do
{nil, attrs} ->
{attrs, block}
{meta, attrs} ->
meta_ast =
quote do
meta unquote(meta)
end
block = [meta_ast, block]
{attrs, block}
end
__CALLER__
|> recordable!(:object, @placement[:object])
|> record!(Schema.ObjectTypeDefinition, identifier, attrs, block)
end
@placement {:interfaces, [under: [:object]]}
@doc """
Declare implemented interfaces for an object.
See also `interface/1`, which can be used for one interface,
and `interface/3`, used to define interfaces themselves.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
object :car do
interfaces [:vehicle, :branded]
# ...
end
```
"""
defmacro interfaces(ifaces) when is_list(ifaces) do
  # Only valid inside an `object` block; recordable! raises otherwise.
  env = recordable!(__CALLER__, :interfaces, @placement[:interfaces])
  record_interfaces!(env, ifaces)
end
@placement {:deprecate, [under: [:field]]}
@doc """
Mark a field as deprecated
In most cases you can simply pass the deprecate: "message" attribute. However
when using the block form of a field it can be nice to also use this macro.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
field :foo, :string do
deprecate "Foo will no longer be supported"
end
```
This is how to deprecate other things
```
field :foo, :string do
arg :bar, :integer, deprecate: "This isn't supported either"
end
enum :colors do
value :red
value :blue, deprecate: "This isn't supported"
end
```
"""
defmacro deprecate(msg) do
  # Records a deprecation message on the enclosing field.
  env = recordable!(__CALLER__, :deprecate, @placement[:deprecate])
  record_deprecate!(env, msg)
end
@doc """
Declare an implemented interface for an object.
Adds an `Absinthe.Type.Interface` to your schema.
See also `interfaces/1`, which can be used for multiple interfaces,
and `interface/3`, used to define interfaces themselves.
## Examples
```
object :car do
interface :vehicle
# ...
end
```
"""
@placement {:interface_attribute, [under: [:object]]}
defmacro interface(identifier) do
  # One-interface form, usable only inside an `object` block.
  env = recordable!(__CALLER__, :interface_attribute, @placement[:interface_attribute])
  record_interface!(env, identifier)
end
# INTERFACES
@placement {:interface, [toplevel: true]}
@doc """
Define an interface type.
Adds an `Absinthe.Type.Interface` to your schema.
Also see `interface/1` and `interfaces/1`, which declare
that an object implements one or more interfaces.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
interface :vehicle do
field :wheel_count, :integer
end
object :rally_car do
field :wheel_count, :integer
interface :vehicle
end
```
"""
defmacro interface(identifier, attrs \\ [], do: block) do
  # Top-level form: defines an interface type in the schema blueprint.
  env = recordable!(__CALLER__, :interface, @placement[:interface])
  record!(env, Schema.InterfaceTypeDefinition, identifier, attrs, block)
end
@placement {:resolve_type, [under: [:interface, :union]]}
@doc """
Define a type resolver for a union or interface.
See also:
* `Absinthe.Type.Interface`
* `Absinthe.Type.Union`
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
interface :entity do
# ...
resolve_type fn
%{employee_count: _}, _ ->
:business
%{age: _}, _ ->
:person
end
end
```
"""
defmacro resolve_type(func_ast) do
  # Records the type-resolution function for an interface or union.
  env = recordable!(__CALLER__, :resolve_type, @placement[:resolve_type])
  record_resolve_type!(env, func_ast)
end
# Normalizes the keyword attributes accepted by `field/2,3,4` into an
# `{attrs, block}` pair: each inline `:args` entry becomes a quoted
# `arg ...` call, and an inline `:resolve` becomes a quoted `resolve ...`
# call prepended to those; both are later merged into the field's do-block.
# The remaining attrs are macro-expanded and the user-facing `:deprecate`
# option is converted to the internal `:deprecation` representation.
defp handle_field_attrs(attrs, caller) do
block =
for {identifier, arg_attrs} <- Keyword.get(attrs, :args, []) do
quote do
arg unquote(identifier), unquote(arg_attrs)
end
end
{func_ast, attrs} = Keyword.pop(attrs, :resolve)
block =
if func_ast do
[
quote do
resolve unquote(func_ast)
end
]
else
[]
end ++ block
attrs =
attrs
|> expand_ast(caller)
|> Keyword.delete(:args)
|> handle_deprecate
{attrs, block}
end
# Replaces the user-facing `:deprecate` option with the internal
# `:deprecation` key (an %Absinthe.Type.Deprecation{} or nil).
defp handle_deprecate(attrs) do
  {deprecate_msg, remaining} = Keyword.pop(attrs, :deprecate)
  Keyword.put(remaining, :deprecation, build_deprecation(deprecate_msg))
end
# Builds the deprecation record from the `:deprecate` option value:
# `true` -> deprecated without a reason, a binary -> deprecated with that
# reason, anything else (including nil) -> not deprecated.
defp build_deprecation(true), do: %Absinthe.Type.Deprecation{reason: nil}

defp build_deprecation(reason) when is_binary(reason),
  do: %Absinthe.Type.Deprecation{reason: reason}

defp build_deprecation(_), do: nil
# FIELDS
@placement {:field, [under: [:input_object, :interface, :object]]}
@doc """
Defines a GraphQL field
See `field/4`
"""
# field/2 with a keyword list: inline attrs (args/resolve/deprecate) are
# normalized into a generated block.
defmacro field(identifier, attrs) when is_list(attrs) do
{attrs, block} = handle_field_attrs(attrs, __CALLER__)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
# field/2 with a bare type, e.g. `field :name, :string`.
defmacro field(identifier, type) do
{attrs, block} = handle_field_attrs([type: type], __CALLER__)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
@doc """
Defines a GraphQL field
See `field/4`
"""
defmacro field(identifier, attrs, do: block) when is_list(attrs) do
{attrs, more_block} = handle_field_attrs(attrs, __CALLER__)
# Attr-generated calls (args/resolve) come before the user's own block.
block = more_block ++ List.wrap(block)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
defmacro field(identifier, type, do: block) do
# Only `:type` is passed, so the generated block is empty and can be
# discarded; the user's do-block is recorded as-is.
{attrs, _} = handle_field_attrs([type: type], __CALLER__)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
# field/3 with a type and extra attrs, e.g. `field :age, :integer, description: "..."`.
defmacro field(identifier, type, attrs) do
{attrs, block} = handle_field_attrs(Keyword.put(attrs, :type, type), __CALLER__)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
@doc """
Defines a GraphQL field.
## Placement
#{Utils.placement_docs(@placement)}
`query`, `mutation`, and `subscription` are
all objects under the covers, and thus you'll find `field` definitions under
those as well.
## Examples
```
field :id, :id
field :age, :integer, description: "How old the item is"
field :name, :string do
description "The name of the item"
end
field :location, type: :location
```
"""
# field/4: type + attrs + do-block. Attr-generated calls are prepended to
# the user's block before recording.
defmacro field(identifier, type, attrs, do: block) do
attrs = Keyword.put(attrs, :type, type)
{attrs, more_block} = handle_field_attrs(attrs, __CALLER__)
block = more_block ++ List.wrap(block)
__CALLER__
|> recordable!(:field, @placement[:field])
|> record!(Schema.FieldDefinition, identifier, attrs, block)
end
@placement {:resolve, [under: [:field]]}
@doc """
Defines a resolve function for a field
Specify a 2 or 3 arity function to call when resolving a field.
You can either hard code a particular anonymous function, or have a function
call that returns a 2 or 3 arity anonymous function. See examples for more information.
Note that when using a hard coded anonymous function, the function will not
capture local variables.
### 3 Arity Functions
The first argument to the function is the parent entity.
```
{
user(id: 1) {
name
}
}
```
A resolution function on the `name` field would have the result of the `user(id: 1)` field
as its first argument. Top level fields have the `root_value` as their first argument.
Unless otherwise specified, this defaults to an empty map.
The second argument to the resolution function is the field arguments. The final
argument is an `Absinthe.Resolution` struct, which includes information like
the `context` and other execution data.
### 2 Arity Function
Exactly the same as the 3 arity version, but without the first argument (the parent entity)
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
query do
field :person, :person do
resolve &Person.resolve/2
end
end
```
```
query do
field :person, :person do
resolve fn %{id: id}, _ ->
{:ok, Person.find(id)}
end
end
end
```
```
query do
field :person, :person do
resolve lookup(:person)
end
end
def lookup(:person) do
fn %{id: id}, _ ->
{:ok, Person.find(id)}
end
end
```
"""
defmacro resolve(func_ast) do
# recordable! is invoked purely for its placement validation (it raises
# when `resolve` is used outside a field); its return value is unused.
__CALLER__
|> recordable!(:resolve, @placement[:resolve])
# `resolve` is sugar for pushing the Absinthe.Resolution middleware with
# the given resolver function.
quote do
middleware Absinthe.Resolution, unquote(func_ast)
end
end
@placement {:complexity, [under: [:field]]}
@doc """
Set the complexity of a field
For a field, the first argument to the function you supply to complexity/1 is the user arguments -- just as a field's resolver can use user arguments to resolve its value, the complexity function that you provide can use the same arguments to calculate the field's complexity.
The second argument passed to your complexity function is the sum of all the complexity scores of all the fields nested below the current field.
(If a complexity function accepts three arguments, the third will be an `%Absinthe.Resolution{}` struct, just as with resolvers.)
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
query do
field :people, list_of(:person) do
arg :limit, :integer, default_value: 10
complexity fn %{limit: limit}, child_complexity ->
# set complexity based on maximum number of items in the list and
# complexity of a child.
limit * child_complexity
end
end
end
```
"""
defmacro complexity(func_ast) do
  # Records the complexity function for the enclosing field.
  env = recordable!(__CALLER__, :complexity, @placement[:complexity])
  record_complexity!(env, func_ast)
end
@placement {:middleware, [under: [:field]]}
defmacro middleware(new_middleware, opts \\ []) do
  # Appends a middleware spec (module or {module, opts}) to the field.
  env = recordable!(__CALLER__, :middleware, @placement[:middleware])
  record_middleware!(env, new_middleware, opts)
end
@placement {:is_type_of, [under: [:object]]}
@doc """
## Placement
#{Utils.placement_docs(@placement)}
"""
defmacro is_type_of(func_ast) do
  # Records the is_type_of function for the enclosing object.
  env = recordable!(__CALLER__, :is_type_of, @placement[:is_type_of])
  record_is_type_of!(env, func_ast)
end
@placement {:arg, [under: [:directive, :field]]}
# ARGS
@doc """
Add an argument.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
field do
arg :size, :integer
arg :name, non_null(:string), description: "The desired name"
arg :public, :boolean, default_value: true
end
```
"""
# arg/3: explicit type plus extra attrs.
defmacro arg(identifier, type, attrs) do
attrs = handle_arg_attrs(identifier, type, attrs)
__CALLER__
|> recordable!(:arg, @placement[:arg])
|> record!(Schema.InputValueDefinition, identifier, attrs, nil)
end
@doc """
Add an argument.
See `arg/3`
"""
# arg/2 with a keyword list: the type is expected inside `attrs`.
defmacro arg(identifier, attrs) when is_list(attrs) do
attrs = handle_arg_attrs(identifier, nil, attrs)
__CALLER__
|> recordable!(:arg, @placement[:arg])
|> record!(Schema.InputValueDefinition, identifier, attrs, nil)
end
# arg/2 with a bare type, e.g. `arg :size, :integer`.
defmacro arg(identifier, type) do
attrs = handle_arg_attrs(identifier, type, [])
__CALLER__
|> recordable!(:arg, @placement[:arg])
|> record!(Schema.InputValueDefinition, identifier, attrs, nil)
end
# SCALARS
@placement {:scalar, [toplevel: true]}
@doc """
Define a scalar type
A scalar type requires `parse/1` and `serialize/1` functions.
## Placement
#{Utils.placement_docs(@placement)}
## Examples
```
scalar :time, description: "ISOz time" do
parse &Timex.parse(&1.value, "{ISOz}")
serialize &Timex.format!(&1, "{ISOz}")
end
```
"""
# scalar/3: attrs plus a do-block (typically containing parse/serialize).
defmacro scalar(identifier, attrs, do: block) do
__CALLER__
|> recordable!(:scalar, @placement[:scalar])
|> record!(Schema.ScalarTypeDefinition, identifier, attrs, block)
end
@doc """
Defines a scalar type
See `scalar/3`
"""
# scalar/2 with only a do-block.
defmacro scalar(identifier, do: block) do
__CALLER__
|> recordable!(:scalar, @placement[:scalar])
|> record!(Schema.ScalarTypeDefinition, identifier, [], block)
end
# scalar/2 with only attrs and no block.
defmacro scalar(identifier, attrs) do
__CALLER__
|> recordable!(:scalar, @placement[:scalar])
|> record!(Schema.ScalarTypeDefinition, identifier, attrs, nil)
end
@placement {:serialize, [under: [:scalar]]}
@doc """
Defines a serialization function for a `scalar` type
The specified `serialize` function is used on outgoing data. It should simply
return the desired external representation.
## Placement
#{Utils.placement_docs(@placement)}
"""
defmacro serialize(func_ast) do
  # Records the serialization function for the enclosing scalar.
  env = recordable!(__CALLER__, :serialize, @placement[:serialize])
  record_serialize!(env, func_ast)
end
@placement {:private,
[under: [:field, :object, :input_object, :enum, :scalar, :interface, :union]]}
@doc false
defmacro private(owner, key, value) do
  # Stores a private key/value pair under `owner` on the current scope.
  env = recordable!(__CALLER__, :private, @placement[:private])
  record_private!(env, owner, [{key, value}])
end
@placement {:meta,
[under: [:field, :object, :input_object, :enum, :scalar, :interface, :union]]}
@doc """
Defines a metadata key/value pair for a custom type.
For more info see `meta/1`
### Examples
```
meta :cache, false
```
## Placement
#{Utils.placement_docs(@placement)}
"""
defmacro meta(key, value) do
  # Single key/value form; stored as private data under the :meta owner.
  env = recordable!(__CALLER__, :meta, @placement[:meta])
  record_private!(env, :meta, [{key, value}])
end
@doc """
Defines list of metadata's key/value pair for a custom type.
This is generally used to facilitate libraries that want to augment Absinthe
functionality
## Examples
```
object :user do
meta cache: true, ttl: 22_000
end
object :user, meta: [cache: true, ttl: 22_000] do
# ...
end
```
The meta can be accessed via the `Absinthe.Type.meta/2` function.
```
user_type = Absinthe.Schema.lookup_type(MyApp.Schema, :user)
Absinthe.Type.meta(user_type, :cache)
#=> true
Absinthe.Type.meta(user_type)
#=> [cache: true, ttl: 22_000]
```
## Placement
#{Utils.placement_docs(@placement)}
"""
defmacro meta(keyword_list) do
  # Keyword-list form; all pairs are stored under the :meta owner.
  env = recordable!(__CALLER__, :meta, @placement[:meta])
  record_private!(env, :meta, keyword_list)
end
@placement {:parse, [under: [:scalar]]}
@doc """
Defines a parse function for a `scalar` type

The specified `parse` function is used on incoming data to transform it into
an elixir datastructure.

It should return `{:ok, value}` or `:error`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro parse(func_ast) do
  caller = recordable!(__CALLER__, :parse, @placement[:parse])
  record_parse!(caller, func_ast)
end
# DIRECTIVES

@placement {:directive, [toplevel: true]}
@doc """
Defines a directive

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
directive :mydirective do
  arg :if, non_null(:boolean), description: "Skipped when true."

  on [:field, :fragment_spread, :inline_fragment]

  expand fn
    %{if: true}, node ->
      Blueprint.put_flag(node, :skip, __MODULE__)

    _, node ->
      node
  end
end
```
"""
defmacro directive(identifier, attrs \\ [], do: block) do
  caller = recordable!(__CALLER__, :directive, @placement[:directive])
  record_directive!(caller, identifier, attrs, block)
end
@placement {:on, [under: [:directive]]}
@doc """
Declare a directive as operating on an AST node type

See `directive/2`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro on(ast_node) do
  caller = recordable!(__CALLER__, :on, @placement[:on])
  record_locations!(caller, ast_node)
end

@placement {:expand, [under: [:directive]]}
@doc """
Define the expansion for a directive

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro expand(func_ast) do
  caller = recordable!(__CALLER__, :expand, @placement[:expand])
  record_expand!(caller, func_ast)
end
# INPUT OBJECTS

@placement {:input_object, [toplevel: true]}
@doc """
Defines an input object

See `Absinthe.Type.InputObject`

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
input_object :contact_input do
  field :email, non_null(:string)
end
```
"""
defmacro input_object(identifier, attrs \\ [], do: block) do
  caller = recordable!(__CALLER__, :input_object, @placement[:input_object])
  record!(caller, Schema.InputObjectTypeDefinition, identifier, attrs, block)
end
# UNIONS

@placement {:union, [toplevel: true]}
@doc """
Defines a union type

See `Absinthe.Type.Union`

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
union :search_result do
  description "A search result"

  types [:person, :business]

  resolve_type fn
    %Person{}, _ -> :person
    %Business{}, _ -> :business
  end
end
```
"""
defmacro union(identifier, attrs \\ [], do: block) do
  caller = recordable!(__CALLER__, :union, @placement[:union])
  record!(caller, Schema.UnionTypeDefinition, identifier, attrs, block)
end

@placement {:types, [under: [:union]]}
@doc """
Defines the types possible under a union type

See `union/3`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro types(types) do
  caller = recordable!(__CALLER__, :types, @placement[:types])
  record_types!(caller, types)
end
# ENUMS

@placement {:enum, [toplevel: true]}
@doc """
Defines an enum type

## Placement

#{Utils.placement_docs(@placement)}

## Examples

Handling `RED`, `GREEN`, `BLUE` values from the query document:

```
enum :color do
  value :red
  value :green
  value :blue
end
```

A given query document might look like:

```graphql
{
  foo(color: RED)
}
```

Internally you would get an argument in elixir that looks like:

```elixir
%{color: :red}
```

If your return value is an enum, it will get serialized out as:

```json
{"color": "RED"}
```

You can provide custom value mappings. Here we use `r`, `g`, `b` values:

```
enum :color do
  value :red, as: "r"
  value :green, as: "g"
  value :blue, as: "b"
end
```
"""
defmacro enum(identifier, attrs, do: block) do
  # Attrs are normalized before the placement check, mirroring the original
  # evaluation order.
  enum_attrs = handle_enum_attrs(attrs, __CALLER__)
  caller = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(caller, Schema.EnumTypeDefinition, identifier, enum_attrs, block)
end

@doc """
Defines an enum type

See `enum/3`
"""
defmacro enum(identifier, do: block) do
  caller = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(caller, Schema.EnumTypeDefinition, identifier, [], block)
end

defmacro enum(identifier, attrs) do
  enum_attrs = handle_enum_attrs(attrs, __CALLER__)
  caller = recordable!(__CALLER__, :enum, @placement[:enum])
  record!(caller, Schema.EnumTypeDefinition, identifier, enum_attrs, [])
end
# Expands enum attrs against the caller env and converts any `:values`
# shorthand (a list of identifiers) into EnumValueDefinition structs.
defp handle_enum_attrs(attrs, env) do
  expanded = expand_ast(attrs, env)

  Keyword.update(expanded, :values, [], fn idents ->
    for ident <- idents do
      struct!(
        Schema.EnumValueDefinition,
        handle_enum_value_attrs(ident, module: env.module)
      )
    end
  end)
end
@placement {:value, [under: [:enum]]}
@doc """
Defines a value possible under an enum type

See `enum/3`

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro value(identifier, raw_attrs \\ []) do
  caller = recordable!(__CALLER__, :value, @placement[:value])
  record_value!(caller, identifier, raw_attrs)
end

# GENERAL ATTRIBUTES

@placement {:description, [toplevel: false]}
@doc """
Defines a description

This macro adds a description to any other macro which takes a block.

Note that you can also specify a description by using `@desc` above any item
that can take a description attribute.

## Placement

#{Utils.placement_docs(@placement)}
"""
defmacro description(text) do
  caller = recordable!(__CALLER__, :description, @placement[:description])
  record_description!(caller, text)
end
# TYPE UTILITIES

@doc """
Marks a type reference as non null

See `field/3` for examples
"""
defmacro non_null(type) do
  inner = expand_ast(type, __CALLER__)
  %Absinthe.Blueprint.TypeReference.NonNull{of_type: inner}
end

@doc """
Marks a type reference as a list of the given type

See `field/3` for examples
"""
defmacro list_of(type) do
  inner = expand_ast(type, __CALLER__)
  %Absinthe.Blueprint.TypeReference.List{of_type: inner}
end
@placement {:import_fields, [under: [:input_object, :interface, :object]]}
@doc """
Import fields from another object

## Example

```
object :news_queries do
  field :all_links, list_of(:link)
  field :main_story, :link
end

object :admin_queries do
  field :users, list_of(:user)
  field :pending_posts, list_of(:post)
end

query do
  import_fields :news_queries
  import_fields :admin_queries
end
```

Import fields can also be used on objects created inside other modules that you
have used import_types on.

```
defmodule MyApp.Schema.NewsTypes do
  use Absinthe.Schema.Notation

  object :news_queries do
    field :all_links, list_of(:link)
    field :main_story, :link
  end
end

defmodule MyApp.Schema.Schema do
  use Absinthe.Schema

  import_types MyApp.Schema.NewsTypes

  query do
    import_fields :news_queries
    # ...
  end
end
```
"""
defmacro import_fields(source_criteria, opts \\ []) do
  criteria = expand_ast(source_criteria, __CALLER__)
  put_attr(__CALLER__.module, {:import_fields, {criteria, opts}})
end
@placement {:import_types, [toplevel: true]}
@doc """
Import types from another module

Very frequently your schema module will simply have the `query` and `mutation`
blocks, and you'll want to break out your other types into other modules. This
macro imports those types for use the current module

## Placement

#{Utils.placement_docs(@placement)}

## Examples

```
import_types MyApp.Schema.Types

import_types MyApp.Schema.Types.{TypesA, TypesB}
```
"""
defmacro import_types(type_module_ast, opts \\ []) do
  env = __CALLER__
  expanded = Macro.expand(type_module_ast, env)
  do_import_types(expanded, env, opts)
end
@placement {:import_sdl, [toplevel: true]}

@type import_sdl_option :: {:path, String.t() | Macro.t()}

@doc """
Import types defined using the Schema Definition Language (SDL).

TODO: Explain handlers

## Placement

#{Utils.placement_docs(@placement)}

## Examples

Directly embedded SDL:

```
import_sdl \"""
type Query {
  posts: [Post]
}

type Post {
  title: String!
  body: String!
}
\"""
```

Loaded from a file location (supporting recompilation on change):

```
import_sdl path: "/path/to/sdl.graphql"
```

TODO: Example for dynamic loading during init
"""
@spec import_sdl([import_sdl_option(), ...]) :: Macro.t()
defmacro import_sdl(opts) when is_list(opts) do
  do_import_sdl(__CALLER__, nil, opts)
end

@spec import_sdl(String.t() | Macro.t(), [import_sdl_option()]) :: Macro.t()
defmacro import_sdl(sdl, opts \\ []) do
  do_import_sdl(__CALLER__, sdl, opts)
end
# Defines the possible values of an enum in one call (see `record_values!/2`).
# NOTE(review): unlike the sibling notation macros, this one performs no
# `recordable!/3` placement check before recording, and no `@placement` entry
# exists for `:values` — confirm whether that is intentional.
defmacro values(values) do
  __CALLER__
  |> record_values!(values)
end
### Recorders ###
#################

# Definition structs that open a new notation scope when recorded; used as
# the guard whitelist for `record!/5`.
@scoped_types [
  Schema.ObjectTypeDefinition,
  Schema.FieldDefinition,
  Schema.ScalarTypeDefinition,
  Schema.EnumTypeDefinition,
  Schema.EnumValueDefinition,
  Schema.InputObjectTypeDefinition,
  Schema.InputValueDefinition,
  Schema.UnionTypeDefinition,
  Schema.InterfaceTypeDefinition,
  Schema.DirectiveDefinition
]

# Records a scoped type definition: expands any macros/aliases in `attrs`
# against the caller env, then emits the definition and pushes its scope via
# `scoped_def/5`.
def record!(env, type, identifier, attrs, block) when type in @scoped_types do
  attrs = expand_ast(attrs, env)
  scoped_def(env, type, identifier, attrs, block)
end
# Normalizes argument attributes: defaults the name to the stringified
# identifier and the type to the given type, then applies deprecation
# handling. Explicit entries in `raw_attrs` win over the defaults.
def handle_arg_attrs(identifier, type, raw_attrs) do
  with_name = Keyword.put_new(raw_attrs, :name, to_string(identifier))
  with_type = Keyword.put_new(with_name, :type, type)
  handle_deprecate(with_type)
end
@doc false
# Records a directive `expand` function in the current scope.
def record_expand!(env, func_ast), do: put_attr(env.module, {:expand, func_ast})

@doc false
# Records the AST node kinds a directive may be applied to (always stored as
# a list).
def record_locations!(env, locations) do
  expanded = expand_ast(locations, env)
  put_attr(env.module, {:locations, List.wrap(expanded)})
end

@doc false
# Records a directive definition, defaulting its name from the identifier.
def record_directive!(env, identifier, attrs, block) do
  attrs =
    attrs
    |> Keyword.put(:identifier, identifier)
    |> Keyword.put_new(:name, to_string(identifier))

  scoped_def(env, Schema.DirectiveDefinition, identifier, attrs, block)
end

@doc false
# Records a scalar `parse` function in the current scope.
def record_parse!(env, fun_ast), do: put_attr(env.module, {:parse, fun_ast})

@doc false
# Records library-internal key/value data under `owner` in the current scope.
def record_private!(env, owner, keyword_list) when is_list(keyword_list) do
  expanded = expand_ast(keyword_list, env)
  put_attr(env.module, {:__private__, [{owner, expanded}]})
end

@doc false
# Records a scalar `serialize` function in the current scope.
def record_serialize!(env, fun_ast), do: put_attr(env.module, {:serialize, fun_ast})
@doc false
# Records an `is_type_of` type checker in the current scope.
def record_is_type_of!(env, func_ast) do
  put_attr(env.module, {:is_type_of, func_ast})
end

@doc false
# Records a complexity analyzer in the current scope.
def record_complexity!(env, func_ast) do
  put_attr(env.module, {:complexity, func_ast})
end

@doc false
# Records a `resolve_type` function in the current scope.
def record_resolve_type!(env, func_ast) do
  put_attr(env.module, {:resolve_type, func_ast})
end

@doc false
# Records a single implemented interface in the current scope.
def record_interface!(env, identifier) do
  put_attr(env.module, {:interface, identifier})
end

@doc false
# Records a deprecation (message expanded against the caller env) in the
# current scope.
def record_deprecate!(env, msg) do
  deprecation =
    msg
    |> expand_ast(env)
    |> build_deprecation()

  put_attr(env.module, {:deprecation, deprecation})
end

@doc false
# Records each of a list of implemented interfaces in the current scope.
def record_interfaces!(env, ifaces) do
  Enum.each(ifaces, fn iface -> record_interface!(env, iface) end)
end

@doc false
# Records the member types of a union in the current scope.
def record_types!(env, types) do
  put_attr(env.module, {:types, types})
end
@doc false
# Record an enum type.
#
# Fixed: this previously passed the bare atom `:enum` as the definition type,
# but `scoped_def/5` calls `struct!(type, attrs)` and `Map.fetch!(@scope_map,
# type)`, both of which require a definition module — so the call could never
# succeed. Use `Schema.EnumTypeDefinition`, consistent with `enum/3`.
def record_enum!(env, identifier, attrs, block) do
  attrs = expand_ast(attrs, env)
  attrs = Keyword.put(attrs, :identifier, identifier)
  scoped_def(env, Schema.EnumTypeDefinition, identifier, attrs, block)
end
defp reformat_description(text), do: String.trim(text)
@doc false
# Records a (trimmed) description in the current scope.
def record_description!(env, text_block) do
  put_attr(env.module, {:desc, reformat_description(text_block)})
end
# Normalizes the attributes for a single enum value: sets the identifier,
# resolves the runtime value from `:as` (defaulting to the identifier),
# defaults the external name to the upcased identifier, and applies
# deprecation handling.
#
# `env` is the caller's `Macro.Env` used for AST expansion. Previously the
# attrs list itself was passed as the expansion environment
# (`expand_ast(raw_attrs, raw_attrs)`), which only avoided crashing because
# literal attrs never contain expandable 3-tuple AST nodes. The new optional
# third argument lets callers supply the real env; it defaults to `nil`, in
# which case the original (attrs-as-env) behavior is preserved for
# backward compatibility.
def handle_enum_value_attrs(identifier, raw_attrs, env \\ nil) do
  raw_attrs
  |> expand_ast(env || raw_attrs)
  |> Keyword.put(:identifier, identifier)
  |> Keyword.put(:value, Keyword.get(raw_attrs, :as, identifier))
  |> Keyword.put_new(:name, String.upcase(to_string(identifier)))
  |> Keyword.delete(:as)
  |> handle_deprecate
end
@doc false
# Records a single enum value in the current scope.
def record_value!(env, identifier, raw_attrs) do
  attrs = handle_enum_value_attrs(identifier, raw_attrs)
  record!(env, Schema.EnumValueDefinition, identifier, attrs, [])
end

@doc false
# Records a list of enum values (the `values/1` macro) in the current scope,
# converting each identifier into an EnumValueDefinition struct.
def record_values!(env, values) do
  definitions =
    for ident <- expand_ast(values, env) do
      struct!(
        Schema.EnumValueDefinition,
        handle_enum_value_attrs(ident, module: env.module)
      )
    end

  put_attr(env.module, {:values, definitions})
end
# Records a subscription `config` function in the current scope.
def record_config!(env, fun_ast), do: put_attr(env.module, {:config, fun_ast})

# Records one subscription trigger per mutation identifier; returns the list
# of blueprint refs, as the original comprehension did.
def record_trigger!(env, mutations, attrs) do
  Enum.map(mutations, fn mutation ->
    put_attr(env.module, {:trigger, {mutation, attrs}})
  end)
end
# Records a middleware entry in the current scope, normalizing the accepted
# shorthand forms into quoted AST for a `{spec, opts}` tuple
# (`{:{}, [], [...]}` is AST that builds a tuple at runtime):
#   * `{module, fun}`                -> `{{module, fun}, opts}`
#   * module atom ("Elixir."-prefix) -> `{{Module, :call}, opts}`
#   * any other atom                 -> a function in the schema module
#                                       itself: `{{env.module, name}, opts}`
#   * anything else (e.g. an anonymous-function AST) is stored unchanged.
def record_middleware!(env, new_middleware, opts) do
  new_middleware =
    case expand_ast(new_middleware, env) do
      {module, fun} ->
        {:{}, [], [{module, fun}, opts]}

      atom when is_atom(atom) ->
        # Module atoms are distinguished from local function names by the
        # "Elixir." alias prefix.
        case Atom.to_string(atom) do
          "Elixir." <> _ ->
            {:{}, [], [{atom, :call}, opts]}

          _ ->
            {:{}, [], [{env.module, atom}, opts]}
        end

      val ->
        val
    end

  put_attr(env.module, {:middleware, [new_middleware]})
end
# ------------------------------

@doc false
# Restores the most recently stashed scope: moves the top of the stash stack
# back onto the live scope stack and records a `:pop` marker in the
# blueprint attribute stream. The stack manipulation runs at
# macro-expansion time; the recorded marker is replayed by the blueprint
# builder.
defmacro pop() do
  module = __CALLER__.module
  popped = pop_stack(module, :absinthe_scope_stack_stash)
  push_stack(module, :absinthe_scope_stack, popped)
  put_attr(__CALLER__.module, :pop)
end

@doc false
# Temporarily sets the current scope aside: moves the top of the live scope
# stack onto the stash stack and records a `:stash` marker. Paired with
# `pop/0`.
defmacro stash() do
  module = __CALLER__.module
  popped = pop_stack(module, :absinthe_scope_stack)
  push_stack(module, :absinthe_scope_stack_stash, popped)
  put_attr(module, :stash)
end

@doc false
# Closes the current scope: records a `:close` marker and discards the top
# of the scope stack (which was pushed by `scoped_def/5`).
defmacro close_scope() do
  put_attr(__CALLER__.module, :close)
  pop_stack(__CALLER__.module, :absinthe_scope_stack)
end
# Attaches a `:__reference__` entry (source-location metadata) to `attrs`.
def put_reference(attrs, env) do
  Keyword.put(attrs, :__reference__, build_reference(env))
end

# Builds the source-location metadata map for a definition recorded in `env`.
def build_reference(env) do
  location = %{file: env.file, line: env.line}
  %{module: env.module, location: location}
end
# Maps each definition struct to the scope tag pushed while its body runs;
# `recordable!/3` checks placement rules against these tags.
@scope_map %{
  Schema.ObjectTypeDefinition => :object,
  Schema.FieldDefinition => :field,
  Schema.ScalarTypeDefinition => :scalar,
  Schema.EnumTypeDefinition => :enum,
  Schema.EnumValueDefinition => :value,
  Schema.InputObjectTypeDefinition => :input_object,
  Schema.InputValueDefinition => :arg,
  Schema.UnionTypeDefinition => :union,
  Schema.InterfaceTypeDefinition => :interface,
  Schema.DirectiveDefinition => :directive
}

# Records a scoped definition:
#   1. finalize attrs (identifier, default name, owning module, source ref),
#   2. build the definition struct and append it to @absinthe_blueprint,
#   3. push the matching scope tag so nested macros can validate placement.
# Returns quoted code: capture any pending @desc for this definition, run
# the user's block, then close the scope.
defp scoped_def(caller, type, identifier, attrs, body) do
  attrs =
    attrs
    |> Keyword.put(:identifier, identifier)
    |> Keyword.put_new(:name, default_name(type, identifier))
    |> Keyword.put(:module, caller.module)
    |> put_reference(caller)

  definition = struct!(type, attrs)

  ref = put_attr(caller.module, definition)

  push_stack(caller.module, :absinthe_scope_stack, Map.fetch!(@scope_map, type))

  [
    get_desc(ref),
    body,
    quote(do: unquote(__MODULE__).close_scope())
  ]
end

# Quoted call that associates the module's pending @desc with `ref`.
defp get_desc(ref) do
  quote do
    unquote(__MODULE__).put_desc(__MODULE__, unquote(ref))
  end
end
# Pushes `val` onto the module-attribute stack stored under `key`.
defp push_stack(module, key, val) do
  current = Module.get_attribute(module, key)
  Module.put_attribute(module, key, [val | current])
end

# Pops and returns the top of the module-attribute stack stored under `key`.
defp pop_stack(module, key) do
  [top | remaining] = Module.get_attribute(module, key)
  Module.put_attribute(module, key, remaining)
  top
end

# Appends `thing` to the accumulated blueprint attribute stream, tagged with
# a unique reference, and returns that reference to the caller.
def put_attr(module, thing) do
  ref = :erlang.unique_integer()
  Module.put_attribute(module, :absinthe_blueprint, {ref, thing})
  ref
end
# Field names keep their identifier's string form; every other definition
# kind is camelized per GraphQL naming convention.
defp default_name(Schema.FieldDefinition, identifier) do
  Atom.to_string(identifier)
end

defp default_name(_, identifier) do
  identifier
  |> Atom.to_string()
  |> Absinthe.Utils.camelize()
end
# Handles `import_types __MODULE__.{A, B}`: each leaf alias is resolved
# relative to the calling module's own namespace.
defp do_import_types({{:., _, [{:__MODULE__, _, _}, :{}]}, _, modules_ast_list}, env, opts) do
  for {_, _, leaf} <- modules_ast_list do
    type_module = Module.concat([env.module | leaf])

    do_import_types(type_module, env, opts)
  end
end

# Handles `import_types Root.{A, B}` (possibly through an alias): resolves
# the root, honors any alias in the caller's env, and imports each leaf.
# Raises if a referenced module cannot be loaded.
defp do_import_types({{:., _, [{:__aliases__, _, root}, :{}]}, _, modules_ast_list}, env, opts) do
  root_module = Module.concat(root)
  root_module_with_alias = Keyword.get(env.aliases, root_module, root_module)

  for {_, _, leaf} <- modules_ast_list do
    type_module = Module.concat([root_module_with_alias | leaf])

    if Code.ensure_loaded?(type_module) do
      do_import_types(type_module, env, opts)
    else
      raise ArgumentError, "module #{type_module} is not available"
    end
  end
end

# Base case: record a single `{module, opts}` pair in the type-import
# accumulator. Returns [] so the macro expands to no runtime code.
defp do_import_types(module, env, opts) do
  Module.put_attribute(env.module, :__absinthe_type_imports__, [
    {module, opts} | Module.get_attribute(env.module, :__absinthe_type_imports__) || []
  ])

  []
end
# No inline SDL given: require a `:path` option, stash it in a module
# attribute, read the file at compile time, and register it as an
# @external_resource so the module recompiles when the file changes.
@spec do_import_sdl(Macro.Env.t(), nil, [import_sdl_option()]) :: Macro.t()
defp do_import_sdl(env, nil, opts) do
  case Keyword.fetch(opts, :path) do
    {:ok, path} ->
      [
        quote do
          @__absinthe_import_sdl_path__ unquote(path)
        end,
        do_import_sdl(
          env,
          quote do
            File.read!(@__absinthe_import_sdl_path__)
          end,
          opts
        ),
        quote do
          @external_resource @__absinthe_import_sdl_path__
        end
      ]

    :error ->
      raise Absinthe.Schema.Notation.Error,
            "Must provide `:path` option to `import_sdl` unless passing a raw SDL string as the first argument"
  end
end

# Parses the SDL (string or quoted expression producing one) in the calling
# module and prepends the resulting definitions to the accumulator
# attribute; parse failures raise at compile time.
@spec do_import_sdl(Macro.Env.t(), String.t() | Macro.t(), Keyword.t()) :: Macro.t()
defp do_import_sdl(env, sdl, opts) do
  ref = build_reference(env)

  quote do
    with {:ok, definitions} <-
           unquote(__MODULE__).SDL.parse(
             unquote(sdl),
             __MODULE__,
             unquote(Macro.escape(ref)),
             unquote(Macro.escape(opts))
           ) do
      @__absinthe_sdl_definitions__ definitions ++
                                      (Module.get_attribute(
                                         __MODULE__,
                                         :__absinthe_sdl_definitions__
                                       ) || [])
    else
      {:error, error} ->
        raise Absinthe.Schema.Notation.Error, "`import_sdl` could not parse SDL:\n#{error}"
    end
  end
end
# Associates the module's pending @desc value with the blueprint ref, then
# clears @desc so it cannot leak onto the next definition.
def put_desc(module, ref) do
  pending = Module.get_attribute(module, :desc)
  Module.put_attribute(module, :absinthe_desc, {ref, pending})
  Module.put_attribute(module, :desc, nil)
end

# No-op; `__before_compile__` expands a call to this with `@desc` as the
# argument.
def noop(_desc), do: :ok
defmacro __before_compile__(env) do
  # Pending descriptions captured via put_desc/2, keyed by blueprint ref.
  module_attribute_descs =
    env.module
    |> Module.get_attribute(:absinthe_desc)
    |> Map.new()

  # The blueprint attribute stream was accumulated in reverse; close the
  # outermost scope, restore order, and splice each captured description in
  # after its target entry.
  attrs =
    env.module
    |> Module.get_attribute(:absinthe_blueprint)
    |> List.insert_at(0, :close)
    |> reverse_with_descs(module_attribute_descs)

  # Normalize type imports into `{module, opts}` pairs.
  imports =
    (Module.get_attribute(env.module, :__absinthe_type_imports__) || [])
    |> Enum.uniq()
    |> Enum.map(fn
      module when is_atom(module) -> {module, []}
      other -> other
    end)

  schema_def = %Schema.SchemaDefinition{
    imports: imports,
    module: env.module,
    __reference__: %{
      location: %{file: env.file, line: 0}
    }
  }

  # Build the blueprint from the ordered attribute stream, with the schema
  # definition inserted right after the leading marker.
  blueprint =
    attrs
    |> List.insert_at(1, schema_def)
    |> Absinthe.Blueprint.Schema.build()

  # TODO: handle multiple schemas
  [schema] = blueprint.schema_definitions

  # Extract function values out of the schema structs so they can be
  # compiled as `__absinthe_function__/2` clauses (see grab_functions/4).
  {schema, functions} = lift_functions(schema, env.module)

  # Re-home SDL-imported definitions onto this module.
  sdl_definitions =
    (Module.get_attribute(env.module, :__absinthe_sdl_definitions__) || [])
    |> List.flatten()
    |> Enum.map(fn definition ->
      Absinthe.Blueprint.prewalk(definition, fn
        %{module: _} = node ->
          %{node | module: env.module}

        node ->
          node
      end)
    end)

  {sdl_directive_definitions, sdl_type_definitions} =
    Enum.split_with(sdl_definitions, fn
      %Absinthe.Blueprint.Schema.DirectiveDefinition{} ->
        true

      _ ->
        false
    end)

  # Merge SDL definitions ahead of the macro-recorded ones.
  schema =
    schema
    |> Map.update!(:type_definitions, &(sdl_type_definitions ++ &1))
    |> Map.update!(:directive_definitions, &(sdl_directive_definitions ++ &1))

  blueprint = %{blueprint | schema_definitions: [schema]}

  quote do
    # Reads @desc once so the compiler does not warn about an unused
    # attribute.
    unquote(__MODULE__).noop(@desc)

    def __absinthe_blueprint__ do
      unquote(Macro.escape(blueprint, unquote: true))
    end

    unquote_splicing(functions)
  end
end
# Walks the schema blueprint, replacing function values stored inside
# definition structs with refs and collecting the `__absinthe_function__/2`
# clause ASTs to be compiled into the schema module.
def lift_functions(schema, origin) do
  Absinthe.Blueprint.prewalk(schema, [], &lift_functions(&1, &2, origin))
end

# Per-node step for the prewalk above: accumulate any generated clauses.
def lift_functions(node, acc, origin) do
  {node, ast} = functions_for_type(node, origin)
  {node, ast ++ acc}
end

# Field definitions are keyed by their stored `function_ref`.
defp functions_for_type(%Schema.FieldDefinition{} = type, origin) do
  grab_functions(
    origin,
    type,
    {Schema.FieldDefinition, type.function_ref},
    Schema.functions(Schema.FieldDefinition)
  )
end

# Other definition structs are keyed by `{module, identifier}`.
defp functions_for_type(%module{identifier: identifier} = type, origin) do
  grab_functions(origin, type, {module, identifier}, Schema.functions(module))
end

# Nodes that carry no function-valued attributes pass through unchanged.
defp functions_for_type(type, _) do
  {type, []}
end
# For each function-valued attribute of `type`, emits an
# `__absinthe_function__(identifier, attr)` clause returning the stored
# value, and replaces the struct field with a `{:ref, origin, identifier}`
# pointer (wrapped in a list when the field previously held a list).
# Returns `{updated_type, clause_asts}`.
def grab_functions(origin, type, identifier, attrs) do
  {ast, type} =
    Enum.flat_map_reduce(attrs, type, fn attr, type ->
      value = Map.fetch!(type, attr)

      ast =
        quote do
          def __absinthe_function__(unquote(identifier), unquote(attr)) do
            unquote(value)
          end
        end

      ref = {:ref, origin, identifier}

      type =
        Map.update!(type, attr, fn
          value when is_list(value) ->
            [ref]

          _ ->
            ref
        end)

      {[ast], type}
    end)

  {type, ast}
end
@doc false
# Guarantees every field ends up with at least one middleware entry, and
# prepends telemetry instrumentation for non-introspection fields. Clause
# order is significant.

# Subscription fields with no middleware just pass the parent through.
def __ensure_middleware__([], _field, %{identifier: :subscription}) do
  [Absinthe.Middleware.PassParent]
end

# Any other field with no middleware defaults to a map lookup by the field's
# identifier.
def __ensure_middleware__([], %{identifier: field_ident}, _object) do
  [{Absinthe.Middleware.MapGet, field_ident}]
end

# Don't install Telemetry middleware for Introspection fields
@introspection [Absinthe.Phase.Schema.Introspection, Absinthe.Type.BuiltIns.Introspection]
def __ensure_middleware__(middleware, %{definition: definition}, _object)
    when definition in @introspection do
  middleware
end

# Install Telemetry middleware
def __ensure_middleware__(middleware, _field, _object) do
  [{Absinthe.Middleware.Telemetry, []} | middleware]
end
# Reverses the accumulated blueprint stream while splicing a `{:desc, desc}`
# entry in directly after any ref-tagged attr that has a (truthy) pending
# description.
defp reverse_with_descs(attrs, descs, acc \\ [])

defp reverse_with_descs([], _descs, acc), do: acc

defp reverse_with_descs([{ref, attr} | rest], descs, acc) do
  # Only truthy descriptions are spliced in; a ref mapped to nil is skipped.
  desc = Map.get(descs, ref)
  next_acc = if desc, do: [attr, {:desc, desc} | acc], else: [attr | acc]
  reverse_with_descs(rest, descs, next_acc)
end

defp reverse_with_descs([attr | rest], descs, acc) do
  reverse_with_descs(rest, descs, [attr | acc])
end
# Walks `ast`, expanding every 3-tuple AST node (macro calls, aliases,
# attributes) against `env`; all other terms pass through untouched.
defp expand_ast(ast, env) do
  Macro.prewalk(ast, fn
    {_, _, _} = node -> Macro.expand(node, env)
    other -> other
  end)
end
@doc false
# Ensure the provided operation can be recorded in the current environment,
# in the current scope context. Returns `env` on success, raises otherwise.
def recordable!(env, usage, placement) do
  [current_scope | _] = Module.get_attribute(env.module, :absinthe_scope_stack)

  if recordable?(placement, current_scope) do
    env
  else
    raise Absinthe.Schema.Notation.Error, invalid_message(placement, usage)
  end
end

# A placement rule is satisfied when the current scope appears in `:under`,
# or when the `:toplevel` requirement matches the `:schema` scope.
defp recordable?([under: allowed], scope) do
  scope in allowed
end

defp recordable?([toplevel: true], scope) do
  scope == :schema
end

defp recordable?([toplevel: false], scope) do
  scope != :schema
end

# Human-readable error messages for each placement-rule shape.
defp invalid_message([under: allowed], usage) do
  names = Enum.map_join(allowed, ", ", &"`#{&1}`")
  "Invalid schema notation: `#{usage}` must only be used within #{names}"
end

defp invalid_message([toplevel: true], usage) do
  "Invalid schema notation: `#{usage}` must only be used toplevel"
end

defp invalid_message([toplevel: false], usage) do
  "Invalid schema notation: `#{usage}` must not be used toplevel"
end
end
| 24.182907 | 278 | 0.649199 |
ff1d069961b4a7c88789458a8152d04ce112411e | 600 | ex | Elixir | lib/gateway/rate_limit/common.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | 1 | 2019-11-06T13:35:35.000Z | 2019-11-06T13:35:35.000Z | lib/gateway/rate_limit/common.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | lib/gateway/rate_limit/common.ex | mmacai/reactive-interaction-gateway | edb9262c65b10a8a5dc21ebf326cf73638e97d36 | [
"Apache-2.0"
] | null | null | null | defmodule Gateway.RateLimit.Common do
@moduledoc false
require Logger
alias Gateway.RateLimit
def now_unix do
{megasecs, secs, microsecs} = :os.timestamp()
megasecs * 1_000_000 + secs + microsecs / 1_000_000
end
def ensure_table(table_name) do
heir_pid = Process.whereis(RateLimit.TableOwner)
:ets.new table_name, [:set, :public, :named_table, {:heir, heir_pid, :noargs}]
Logger.debug "Created ETS table #{inspect table_name}"
table_name
rescue
_ in ArgumentError ->
# This usually just means that the table already exists
table_name
end
end
| 27.272727 | 82 | 0.71 |
ff1d6d7b3cc3ffeae27c875c8b9a5519b503a84b | 1,489 | ex | Elixir | example/backend/lib/backend_web/channels/user_socket.ex | farolanf/phoenix-socket-dart | 3fb441c53a25e2f7ea6f8f0ca9e7bfc25e45cdfd | [
"BSD-3-Clause"
] | 30 | 2020-11-10T07:32:36.000Z | 2022-02-06T20:34:34.000Z | example/backend/lib/backend_web/channels/user_socket.ex | farolanf/phoenix-socket-dart | 3fb441c53a25e2f7ea6f8f0ca9e7bfc25e45cdfd | [
"BSD-3-Clause"
] | 22 | 2020-11-28T20:23:34.000Z | 2022-03-23T22:32:52.000Z | example/backend/lib/backend_web/channels/user_socket.ex | farolanf/phoenix-socket-dart | 3fb441c53a25e2f7ea6f8f0ca9e7bfc25e45cdfd | [
"BSD-3-Clause"
] | 19 | 2020-11-10T14:22:05.000Z | 2022-02-15T10:22:43.000Z | defmodule BackendWeb.UserSocket do
use Phoenix.Socket
## Channels
channel("channel1", BackendWeb.Channel1)
channel("channel1:*", BackendWeb.Channel1)
channel("channel2", BackendWeb.Channel2)
channel("channel3", BackendWeb.Channel3)
channel("presence:lobby", BackendWeb.PresenceChannel)
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(%{"user_id" => user_id}, socket, _connect_info) do
{:ok, assign(socket, :user_id, user_id)}
end
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# BackendWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(socket) do
if Map.has_key?(socket.assigns, :user_id) do
"user_socket:#{socket.assigns.user_id}"
else
nil
end
end
end
| 30.387755 | 83 | 0.696441 |
ff1d7880d433c655aab9b339bfe8a4c3c7d03faa | 4,245 | ex | Elixir | lib/epi_contacts/accounts/admin_token.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | lib/epi_contacts/accounts/admin_token.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | 13 | 2021-06-29T04:35:41.000Z | 2022-02-09T04:25:39.000Z | lib/epi_contacts/accounts/admin_token.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | defmodule EpiContacts.Accounts.AdminToken do
@moduledoc """
# Session storage for admins
"""
use Ecto.Schema
import Ecto.Query
@hash_algorithm :sha256
@rand_size 32
# It is very important to keep the reset password token expiry short,
# since someone with access to the email may take over the account.
@reset_password_validity_in_days 1
@confirm_validity_in_days 7
@change_email_validity_in_days 7
@session_validity_in_days 60
schema "admins_tokens" do
field(:token, :binary)
field(:context, :string)
field(:sent_to, :string)
belongs_to(:admin, EpiContacts.Accounts.Admin)
timestamps(updated_at: false)
end
@doc """
Generates a token that will be stored in a signed place,
such as session or cookie. As they are signed, those
tokens do not need to be hashed.
"""
def build_session_token(admin) do
token = :crypto.strong_rand_bytes(@rand_size)
{token, %EpiContacts.Accounts.AdminToken{token: token, context: "session", admin_id: admin.id}}
end
@doc """
Checks if the token is valid and returns its underlying lookup query.
The query returns the admin found by the token.
"""
def verify_session_token_query(token) do
query =
from(token in token_and_context_query(token, "session"),
join: admin in assoc(token, :admin),
where: token.inserted_at > ago(@session_validity_in_days, "day"),
select: admin
)
{:ok, query}
end
@doc """
Builds a token with a hashed counter part.
The non-hashed token is sent to the admin email while the
hashed part is stored in the database, to avoid reconstruction.
The token is valid for a week as long as admins don't change
their email.
"""
def build_email_token(admin, context) do
build_hashed_token(admin, context, admin.email)
end
defp build_hashed_token(admin, context, sent_to) do
token = :crypto.strong_rand_bytes(@rand_size)
hashed_token = :crypto.hash(@hash_algorithm, token)
{Base.url_encode64(token, padding: false),
%EpiContacts.Accounts.AdminToken{
token: hashed_token,
context: context,
sent_to: sent_to,
admin_id: admin.id
}}
end
@doc """
Checks if the token is valid and returns its underlying lookup query.
The query returns the admin found by the token.
"""
def verify_email_token_query(token, context) do
case Base.url_decode64(token, padding: false) do
{:ok, decoded_token} ->
hashed_token = :crypto.hash(@hash_algorithm, decoded_token)
days = days_for_context(context)
query =
from(token in token_and_context_query(hashed_token, context),
join: admin in assoc(token, :admin),
where: token.inserted_at > ago(^days, "day") and token.sent_to == admin.email,
select: admin
)
{:ok, query}
:error ->
:error
end
end
defp days_for_context("confirm"), do: @confirm_validity_in_days
defp days_for_context("reset_password"), do: @reset_password_validity_in_days
@doc """
Checks if the token is valid and returns its underlying lookup query.
The query returns the admin token record.
"""
def verify_change_email_token_query(token, context) do
case Base.url_decode64(token, padding: false) do
{:ok, decoded_token} ->
hashed_token = :crypto.hash(@hash_algorithm, decoded_token)
query =
from(token in token_and_context_query(hashed_token, context),
where: token.inserted_at > ago(@change_email_validity_in_days, "day")
)
{:ok, query}
:error ->
:error
end
end
@doc """
Returns the given token with the given context.
"""
def token_and_context_query(token, context) do
from(EpiContacts.Accounts.AdminToken, where: [token: ^token, context: ^context])
end
@doc """
Gets all tokens for the given admin for the given contexts.
"""
def admin_and_contexts_query(admin, :all) do
from(t in EpiContacts.Accounts.AdminToken, where: t.admin_id == ^admin.id)
end
def admin_and_contexts_query(admin, [_ | _] = contexts) do
from(t in EpiContacts.Accounts.AdminToken, where: t.admin_id == ^admin.id and t.context in ^contexts)
end
end
| 29.075342 | 105 | 0.686455 |
ff1dbe0f79c3858a6638224f950b59839c6cedcf | 1,369 | ex | Elixir | lib/gremlex/vertex.ex | anthonyfalzetti/gremlex | 210ddbfb0ca9448e5e1ad910fc9ae055b4c1c44c | [
"MIT"
] | null | null | null | lib/gremlex/vertex.ex | anthonyfalzetti/gremlex | 210ddbfb0ca9448e5e1ad910fc9ae055b4c1c44c | [
"MIT"
] | null | null | null | lib/gremlex/vertex.ex | anthonyfalzetti/gremlex | 210ddbfb0ca9448e5e1ad910fc9ae055b4c1c44c | [
"MIT"
] | null | null | null | defmodule Gremlex.Vertex do
alias Gremlex.Vertex
alias Gremlex.Deserializer
@type t :: %Gremlex.Vertex{label: String.t(), id: number(), properties: map()}
@enforce_keys [:label, :id]
@derive [Poison.Encoder]
defstruct [:label, :id, :properties]
def add_properties(%Vertex{properties: nil} = vertex, properties) do
Map.put(vertex, :properties, properties)
end
def add_properties(%Vertex{properties: this} = vertex, that) do
properties = Map.merge(this, that)
Map.put(vertex, :properties, properties)
end
def from_response(%{"id" => json_id, "label" => label} = result) do
id =
case json_id do
%{"@type" => id_type, "@value" => id_value} ->
Deserializer.deserialize(id_type, id_value)
id ->
id
end
properties = Map.get(result, "properties", %{})
vertex = %Vertex{id: id, label: label}
serialized_properties =
Enum.reduce(properties, %{}, fn {label, property}, acc ->
values =
Enum.map(property, fn
%{"@value" => %{"value" => %{"@type" => type, "@value" => value}}} ->
Deserializer.deserialize(type, value)
%{"@value" => %{"value" => value}} ->
value
end)
Map.put(acc, String.to_atom(label), values)
end)
Vertex.add_properties(vertex, serialized_properties)
end
end
| 28.520833 | 81 | 0.598247 |
ff1dd454d312810ea4677c553e2fe62691b21ee9 | 529 | ex | Elixir | lib/mix_test_interactive/command/all_tests.ex | kianmeng/mix_test_interactive | 6ce4ea1a1cbe8c926089b5cbb55556a991f5b2e1 | [
"MIT"
] | 32 | 2021-02-19T17:48:55.000Z | 2022-03-28T21:52:01.000Z | lib/mix_test_interactive/command/all_tests.ex | kianmeng/mix_test_interactive | 6ce4ea1a1cbe8c926089b5cbb55556a991f5b2e1 | [
"MIT"
] | 6 | 2021-03-02T08:48:58.000Z | 2021-11-19T16:58:42.000Z | lib/mix_test_interactive/command/all_tests.ex | kianmeng/mix_test_interactive | 6ce4ea1a1cbe8c926089b5cbb55556a991f5b2e1 | [
"MIT"
] | 2 | 2021-11-19T16:32:07.000Z | 2022-03-25T10:32:22.000Z | defmodule MixTestInteractive.Command.AllTests do
@moduledoc """
Run all tests, removing any flags or filters.
"""
alias MixTestInteractive.{Command, Settings}
use Command, command: "a", desc: "run all tests"
@impl Command
# "Run all tests" is offered whenever some restriction is currently in
# effect: failed-only mode, file patterns, or stale mode. Each positive
# clause returns true, so their relative order is irrelevant.
def applies?(%Settings{stale?: true}), do: true
def applies?(%Settings{failed?: true}), do: true
def applies?(%Settings{patterns: [_ | _]}), do: true
def applies?(_other), do: false
@impl Command
# Clears all restrictions by resetting the settings to "all tests".
def run(_args, settings), do: {:ok, Settings.all_tests(settings)}
end
| 25.190476 | 56 | 0.689981 |
ff1df1c873e7975cade8bd8617f3c30866498122 | 76 | exs | Elixir | backend/test/test_helper.exs | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 10 | 2020-03-13T12:56:06.000Z | 2021-06-28T22:13:27.000Z | backend/test/test_helper.exs | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 132 | 2020-02-08T02:01:03.000Z | 2022-02-18T20:38:38.000Z | backend/test/test_helper.exs | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 1 | 2021-03-17T06:22:55.000Z | 2021-03-17T06:22:55.000Z | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(FunctionalVote.Repo, :manual)
| 25.333333 | 60 | 0.802632 |
ff1e345baee8209b7fc1217f4314d680258a5cf7 | 247 | ex | Elixir | gaius/lib/gaius.ex | zndx/gaius | 01701d355a3fcbeab49c1f7c6283157357d57336 | [
"MIT"
] | 1 | 2020-10-06T02:01:03.000Z | 2020-10-06T02:01:03.000Z | gaius/lib/gaius.ex | zndx/gaius | 01701d355a3fcbeab49c1f7c6283157357d57336 | [
"MIT"
] | null | null | null | gaius/lib/gaius.ex | zndx/gaius | 01701d355a3fcbeab49c1f7c6283157357d57336 | [
"MIT"
] | null | null | null | defmodule Gaius do
@moduledoc """
Gaius keeps the contexts that define your domain
and business logic.

Contexts are also responsible for managing your data, regardless
of whether it comes from the database, an external API or others.
"""
end
| 24.7 | 66 | 0.748988 |
ff1e4f3bc3b8dd6eb6aacbe76b9526306c8326ab | 1,167 | ex | Elixir | lib/bittrex/deposits.ex | straw-hat-team/bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
] | 9 | 2017-11-17T21:07:50.000Z | 2018-01-19T09:53:03.000Z | lib/bittrex/deposits.ex | straw-hat-team/bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
] | 27 | 2017-12-13T12:21:10.000Z | 2019-11-01T10:25:39.000Z | lib/bittrex/deposits.ex | straw-hat-llc/elixir_bittrex | 3b6d5c9559d473b685bec7e70d2cd58501805ded | [
"MIT"
] | 5 | 2017-11-20T20:04:43.000Z | 2018-10-03T16:47:37.000Z | defmodule Bittrex.Deposits do
@moduledoc """
Implements /deposits endpoints.
"""
alias StrawHat.Response
alias Bittrex.{Client, Client.Request, Deposit}
@doc """
List historical deposits.
"""
@spec get_deposits(%Client{}, %{
        status: String.t(),
        currency_symbol: String.t(),
        starting_after: String.t(),
        ending_before: String.t(),
        limit: integer()
      }) :: Response.t([%Deposit{}], Client.error())
def get_deposits(client, params \\ %{}) do
  # Build the GET /deposits request first, then send it and decode the
  # payload into %Deposit{} structs.
  request =
    client
    |> Request.new()
    |> Request.put_method(:get)
    |> Request.put_path("/deposits")
    |> Request.put_params(params)

  request
  |> Client.send()
  |> StrawHat.Response.and_then(&Deposit.transform_response/1)
end
@doc """
Retrieve information for a specific deposit.
"""
@spec get_deposit(%Client{}, String.t()) :: Response.t(%Deposit{}, Client.error())
def get_deposit(client, deposit_id) do
  # The previous @doc claimed "(NOT YET IMPLEMENTED)" although the
  # endpoint call below is complete; the stale note has been removed.
  client
  |> Request.new()
  |> Request.put_method(:get)
  |> Request.put_path("/deposits/#{deposit_id}")
  |> Client.send()
  |> StrawHat.Response.and_then(&Deposit.transform_response/1)
end
end
| 27.785714 | 84 | 0.631534 |
ff1e58b407a7a467f65e77ac66ee3c2a022090e8 | 317 | exs | Elixir | priv/repo/migrations/20180427194318_create_messages.exs | kenforthewin/mentat | 417ce989e13f2f08990b872027ce8dcb3a1e6e99 | [
"MIT"
] | 88 | 2018-06-17T17:36:56.000Z | 2021-11-20T20:29:27.000Z | priv/repo/migrations/20180427194318_create_messages.exs | kenforthewin/scalar | 417ce989e13f2f08990b872027ce8dcb3a1e6e99 | [
"MIT"
] | 47 | 2018-05-12T00:12:37.000Z | 2018-06-16T19:52:52.000Z | priv/repo/migrations/20180427194318_create_messages.exs | kenforthewin/scalar | 417ce989e13f2f08990b872027ce8dcb3a1e6e99 | [
"MIT"
] | 6 | 2018-06-17T17:37:11.000Z | 2020-04-12T04:05:49.000Z | defmodule App.Repo.Migrations.CreateMessages do
use Ecto.Migration

# Creates the messages table and lookup indexes on the two FK columns.
def change do
  create table(:messages) do
    add :body, :text
    # NOTE(review): plain :integer columns, not references/2 — no
    # database-level foreign-key constraints are created; confirm this
    # is intentional.
    add :team_id, :integer
    add :user_id, :integer

    timestamps()
  end

  create index(:messages, [:team_id])
  create index(:messages, [:user_id])
end
end
| 18.647059 | 47 | 0.649842 |
ff1e62dd1308bcfc26c6fe2e37e9e628f5404bb7 | 1,821 | exs | Elixir | test/recipe/children_watch_test.exs | Zubale/zookeeper-elixir | 5f4ed6c1de20bd30e506c57d7d3b1bf4239bb6b0 | [
"MIT"
] | null | null | null | test/recipe/children_watch_test.exs | Zubale/zookeeper-elixir | 5f4ed6c1de20bd30e506c57d7d3b1bf4239bb6b0 | [
"MIT"
] | 1 | 2021-05-31T17:13:27.000Z | 2021-05-31T17:13:27.000Z | test/recipe/children_watch_test.exs | Zubale/zookeeper-elixir | 5f4ed6c1de20bd30e506c57d7d3b1bf4239bb6b0 | [
"MIT"
] | null | null | null | defmodule Zookeeper.ChildrenWatchTest do
use ExUnit.Case
alias Zookeeper.Client, as: ZK
alias Zookeeper.ChildrenWatch, as: CW
# TODO: test session lost
# Start a single shared ZooKeeper client for the whole module and wipe
# any state left over from previous runs; the pid is passed to every
# test via the context.
setup_all do
  {:ok, pid} = ZK.start()
  pid |> cleanup
  {:ok, pid: pid}
end

# Delete the /exunit subtree after each test so cases stay independent.
setup %{pid: pid} = context do
  on_exit(context, fn -> cleanup(pid) end)
  :ok
end
# Exercises the full ChildrenWatch lifecycle: the watcher must stay
# silent while the node is absent, then report the child list after
# every create/delete, and resume after the node is recreated.
test "children watch", %{pid: pid} do
  path = "/exunit"
  {:ok, _pid} = CW.start_link(pid, path)
  # Ensure no children are sent if node does not exist.
  refute_receive {_, ^path, :children, _}
  # Create Node
  assert {:ok, path} == ZK.create(pid, path)
  assert_receive {CW, _, ^path, :children, []}
  # Add First Child
  assert {:ok, _} = ZK.create(pid, "#{path}/a")
  assert_receive {CW, _, ^path, :children, ["a"]}
  # Add Children
  assert {:ok, _} = ZK.create(pid, "#{path}/b")
  assert_receive {CW, _, ^path, :children, children}
  # Child order from ZooKeeper is unspecified, hence the sort.
  assert Enum.sort(children) == ["a", "b"]
  assert {:ok, _} = ZK.create(pid, "#{path}/c")
  assert_receive {CW, _, ^path, :children, children}
  assert Enum.sort(children) == ["a", "b", "c"]
  # Delete Child
  assert :ok = ZK.delete(pid, "#{path}/c")
  assert_receive {CW, _, ^path, :children, children}
  assert Enum.sort(children) == ["a", "b"]
  # Delete Node
  assert ZK.delete(pid, path, -1, true)
  assert_receive {CW, _, ^path, :children, []}
  # Recreate Node
  assert {:ok, _} = ZK.create(pid, "#{path}/a", "", makepath: true)
  assert_receive {CW, _, ^path, :children, ["a"]}
end
# The watcher is linked to its client, so killing the client process
# must also take the watcher down (no orphaned watchers).
test "watcher should die if zookeeper dies" do
  {:ok, zk} = ZK.start()
  {:ok, cw} = CW.start(zk, "/test")
  Process.exit(zk, :shutdown)
  # Brief pause to let the exit signal propagate before asserting.
  :timer.sleep(1)
  refute Process.alive?(cw)
end
# Recursively deletes the /exunit test subtree; -1 matches any node
# version, true enables recursive deletion. Direct call instead of a
# single-step pipe (credo: pipelines should have more than one step).
defp cleanup(pid) do
  ZK.delete(pid, "/exunit", -1, true)
end
end
| 26.014286 | 69 | 0.590884 |
ff1e68413bea951ba2182645c439f24b50be1bf5 | 4,949 | ex | Elixir | lib/kino.ex | maartenvanvliet/kino | 259d93f5d0dab9a085eaaa8d9eb0824063be83a2 | [
"Apache-2.0"
] | null | null | null | lib/kino.ex | maartenvanvliet/kino | 259d93f5d0dab9a085eaaa8d9eb0824063be83a2 | [
"Apache-2.0"
] | null | null | null | lib/kino.ex | maartenvanvliet/kino | 259d93f5d0dab9a085eaaa8d9eb0824063be83a2 | [
"Apache-2.0"
] | null | null | null | defmodule Kino do
@moduledoc """
Client-driven interactive widgets for Livebook.
Kino is the library used by Livebook to render rich and interactive
output directly from your Elixir code.
Kino renders any data structure that implements the `Kino.Render`
protocol, falling back to the `inspect/2` representation whenever
an implementation is not available. The data structures supported
by Kino out of the box are:
### VegaLite
`VegaLite` specifications are rendered as visualizations:
Vl.new(...)
|> Vl.data_from_series(...)
|> ...
### Kino.VegaLite
`Kino.VegaLite` is an extension of `VegaLite` that allows data to
be streamed:
widget =
Vl.new(...)
|> Vl.data_from_series(...)
|> ...
|> Kino.VegaLite.new()
|> tap(&Kino.render/1)
Kino.VegaLite.push(widget, %{x: 1, y: 2})
### Kino.ETS
`Kino.ETS` implements a data table output for ETS tables in the
system:
tid = :ets.new(:users, [:set, :public])
Kino.ETS.new(tid)
### Kino.DataTable
`Kino.DataTable` implements a data table output for user-provided
tabular data:
data = [
%{id: 1, name: "Elixir", website: "https://elixir-lang.org"},
%{id: 2, name: "Erlang", website: "https://www.erlang.org"}
]
Kino.DataTable.new(data)
### Kino.Image
`Kino.Image` wraps binary image content and can be used to render
raw images of any given format:
content = File.read!("/path/to/image.jpeg")
Kino.Image.new(content, "image/jpeg")
### Kino.Markdown
`Kino.Markdown` wraps Markdown content for richer text rendering.
Kino.Markdown.new(\"\"\"
# Example
A regular Markdown file.
## Code
```elixir
"Elixir" |> String.graphemes() |> Enum.frequencies()
```
## Table
| ID | Name | Website |
| -- | ------ | ----------------------- |
| 1 | Elixir | https://elixir-lang.org |
| 2 | Erlang | https://www.erlang.org |
\"\"\")
### Kino.Ecto
`Kino.Ecto` implements a data table output for arbitrary
`Ecto` queries:
Kino.Ecto.new(Weather, Repo)
### All others
All other data structures are rendered as text using Elixir's
`inspect/2`.
"""
@doc """
Sends the given term as cell output.

This allows any Livebook cell to have multiple evaluation
results. You can think of this function as a generalized
`IO.puts/2` that works for any type.
"""
@spec render(term()) :: :"do not show this result in output"
def render(term) do
  gl = Process.group_leader()
  # Monitor the group leader so we don't block forever if it dies
  # before replying to the io_request below.
  ref = Process.monitor(gl)

  output = Kino.Render.to_livebook(term)
  send(gl, {:io_request, self(), ref, {:livebook_put_output, output}})

  receive do
    {:io_reply, ^ref, :ok} -> :ok
    {:io_reply, ^ref, _} -> :error
    {:DOWN, ^ref, :process, _object, _reason} -> :error
  end

  # :flush removes any :DOWN message that raced in after the io_reply;
  # a plain demonitor/1 could leave a stray :DOWN in the caller's
  # mailbox.
  Process.demonitor(ref, [:flush])

  :"do not show this result in output"
end
@doc """
Configures Kino.

The supported options are:

  * `:inspect`

They are discussed individually in the sections below.

## Inspect

A keyword list containing inspect options used for printing
usual evaluation results. Defaults to pretty formatting with
a limit of 50 entries.

To show more entries, you configure a higher limit:

    Kino.configure(inspect: [limit: 200])

You can also show all entries by setting the limit to `:infinity`,
but keep in mind that for large data structures it is memory-expensive
and is not an advised configuration in this case. Instead prefer
the use of `IO.inspect/2` with `:infinity` limit when needed.

See `Inspect.Opts` for the full list of options.
"""
@spec configure(keyword()) :: :ok
# Thin delegation to the config server.
def configure(options), do: Kino.Config.configure(options)
@doc ~S"""
Returns a widget that periodically calls the given function
to render a new result.

The callback is run every `interval_ms` milliseconds and receives
the accumulated value. The callback should return either of:

  * `{:cont, term_to_render, acc}` - the continue

  * `:halt` - to no longer schedule callback evaluation

This function uses `Kino.Frame` as the underlying widget.

## Examples

    # Render new Markdown every 100ms
    Kino.animate(100, 0, fn i ->
      md = Kino.Markdown.new("**Iteration: `#{i}`**")
      {:cont, md, i + 1}
    end)
"""
@spec animate(
        pos_integer(),
        term(),
        (term() -> {:cont, term(), acc :: term()} | :halt)
      ) :: :"do not show this result in output"
def animate(interval_ms, acc, fun) do
  frame = Kino.Frame.new()

  # Adapt the user callback to the Frame.periodically contract: render
  # the produced term into the frame and carry the accumulator forward,
  # or stop the schedule on :halt.
  step = fn current_acc ->
    case fun.(current_acc) do
      {:cont, term, next_acc} ->
        Kino.Frame.render(frame, term)
        {:cont, next_acc}

      :halt ->
        :halt
    end
  end

  Kino.Frame.periodically(frame, interval_ms, acc, step)
  Kino.render(frame)
end
end
| 24.869347 | 72 | 0.621338 |
ff1e73ff44e0a3b58ed747882e32472aaec771c2 | 220 | ex | Elixir | lib/parseus/validators/inclusion_validator.ex | surgeventures/parseus | 3c26c41dff8bdfb9851e5ebea1be2a4fa4ea6bd2 | [
"MIT"
] | 2 | 2017-10-18T17:53:43.000Z | 2020-05-01T23:31:36.000Z | lib/parseus/validators/inclusion_validator.ex | surgeventures/parseus | 3c26c41dff8bdfb9851e5ebea1be2a4fa4ea6bd2 | [
"MIT"
] | null | null | null | lib/parseus/validators/inclusion_validator.ex | surgeventures/parseus | 3c26c41dff8bdfb9851e5ebea1be2a4fa4ea6bd2 | [
"MIT"
] | null | null | null | defmodule Parseus.InclusionValidator do
@moduledoc false
def call(input, allowed_values) do
if input in allowed_values do
:ok
else
{:error, nil, allowed_values: allowed_values}
end
end
end
| 18.333333 | 51 | 0.7 |
ff1e82da060e61f743bdd20eed48e237e7b1e92c | 214 | exs | Elixir | test/test_helper.exs | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | test/test_helper.exs | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | test/test_helper.exs | ravernkoh/awesome-lists | ed8f93d6bc1e5968303a484ebe8f0036ddd0db7b | [
"MIT"
] | null | null | null | # Wallaby comfiguration
Application.put_env(:wallaby, :base_url, AwesomeWeb.Endpoint.url)
{:ok, _} = Application.ensure_all_started(:wallaby)
ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Awesome.Repo, :manual)
| 23.777778 | 65 | 0.780374 |
ff1e98a780e9d5c83e637fc913d6fabede07088d | 1,531 | exs | Elixir | apps/tai/test/tai/venues/boot/products_test.exs | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | null | null | null | apps/tai/test/tai/venues/boot/products_test.exs | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | null | null | null | apps/tai/test/tai/venues/boot/products_test.exs | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | 1 | 2020-05-03T23:32:11.000Z | 2020-05-03T23:32:11.000Z | defmodule Tai.Venues.Boot.ProductsTest do
use ExUnit.Case, async: false
defmodule MyAdapter do
def products(_) do
products = [
struct(Tai.Venues.Product, symbol: :btc_usd),
struct(Tai.Venues.Product, symbol: :eth_usd)
]
{:ok, products}
end
end
setup do
on_exit(fn ->
Application.stop(:tai)
end)
{:ok, _} = Application.ensure_all_started(:tai)
:ok
end
test "allows filtering with custom function" do
config =
Tai.Config.parse(
venues: %{
my_venue: [
enabled: true,
adapter: MyAdapter,
products: &custom_filter_helper(&1, :eth_usd)
]
}
)
assert %{my_venue: venue} = Tai.Venues.Config.parse(config)
assert {:ok, [%{symbol: :eth_usd}]} = Tai.Venues.Boot.Products.hydrate(venue)
end
test ".hydrate broadcasts a summary event" do
config =
Tai.Config.parse(
venues: %{my_venue: [enabled: true, adapter: MyAdapter, products: "btc_usd"]}
)
%{my_venue: venue} = Tai.Venues.Config.parse(config)
Tai.Events.subscribe(Tai.Events.HydrateProducts)
Tai.Venues.Boot.Products.hydrate(venue)
assert_receive {Tai.Event,
%Tai.Events.HydrateProducts{
venue_id: :my_venue,
total: 2,
filtered: 1
}, _}
end
def custom_filter_helper(products, exact_match),
do: Enum.filter(products, &(&1.symbol == exact_match))
end
| 24.693548 | 85 | 0.582626 |
ff1e9deb85896d05577ddd3c1a94a6afd18e092e | 2,751 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_promotion.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_promotion.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/invideo_promotion.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.InvideoPromotion do
@moduledoc """
Describes an invideo promotion campaign consisting of multiple promoted items. A campaign belongs to a single channel_id.
## Attributes
* `defaultTiming` (*type:* `GoogleApi.YouTube.V3.Model.InvideoTiming.t`, *default:* `nil`) - The default temporal position within the video where the promoted item will be displayed. Can be overriden by more specific timing in the item.
* `items` (*type:* `list(GoogleApi.YouTube.V3.Model.PromotedItem.t)`, *default:* `nil`) - List of promoted items in decreasing priority.
* `position` (*type:* `GoogleApi.YouTube.V3.Model.InvideoPosition.t`, *default:* `nil`) - The spatial position within the video where the promoted item will be displayed.
* `useSmartTiming` (*type:* `boolean()`, *default:* `nil`) - Indicates whether the channel's promotional campaign uses "smart timing." This feature attempts to show promotions at a point in the video when they are more likely to be clicked and less likely to disrupt the viewing experience. This feature also picks up a single promotion to show on each video.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:defaultTiming => GoogleApi.YouTube.V3.Model.InvideoTiming.t(),
:items => list(GoogleApi.YouTube.V3.Model.PromotedItem.t()),
:position => GoogleApi.YouTube.V3.Model.InvideoPosition.t(),
:useSmartTiming => boolean()
}
field(:defaultTiming, as: GoogleApi.YouTube.V3.Model.InvideoTiming)
field(:items, as: GoogleApi.YouTube.V3.Model.PromotedItem, type: :list)
field(:position, as: GoogleApi.YouTube.V3.Model.InvideoPosition)
field(:useSmartTiming)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.InvideoPromotion do
def decode(value, options) do
GoogleApi.YouTube.V3.Model.InvideoPromotion.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.InvideoPromotion do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 49.125 | 363 | 0.74482 |
ff1eb81359f01bb22a247fd7bc85e5298db512a8 | 1,109 | exs | Elixir | config/config.exs | joe-maguire/irc | 28117ab38976d20d959bab031e954ce4dbdb4957 | [
"MIT"
] | 1 | 2018-04-04T23:27:07.000Z | 2018-04-04T23:27:07.000Z | config/config.exs | joe-maguire/irc | 28117ab38976d20d959bab031e954ce4dbdb4957 | [
"MIT"
] | null | null | null | config/config.exs | joe-maguire/irc | 28117ab38976d20d959bab031e954ce4dbdb4957 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :irc, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:irc, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.774194 | 73 | 0.749324 |
ff1ecf3c20f789ed334c6a58d279cc93fa0415ef | 2,120 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/import_context_bak_import_options_encryption_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/import_context_bak_import_options_encryption_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/import_context_bak_import_options_encryption_options.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.ImportContextBakImportOptionsEncryptionOptions do
@moduledoc """
## Attributes
* `certPath` (*type:* `String.t`, *default:* `nil`) - Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
* `pvkPassword` (*type:* `String.t`, *default:* `nil`) - Password that encrypts the private key
* `pvkPath` (*type:* `String.t`, *default:* `nil`) - Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:certPath => String.t(),
:pvkPassword => String.t(),
:pvkPath => String.t()
}
field(:certPath)
field(:pvkPassword)
field(:pvkPath)
end
defimpl Poison.Decoder,
for: GoogleApi.SQLAdmin.V1beta4.Model.ImportContextBakImportOptionsEncryptionOptions do
def decode(value, options) do
GoogleApi.SQLAdmin.V1beta4.Model.ImportContextBakImportOptionsEncryptionOptions.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.SQLAdmin.V1beta4.Model.ImportContextBakImportOptionsEncryptionOptions do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.551724 | 241 | 0.729245 |
ff1ef7effafb58ec71d801eda5296674dbdbb778 | 337 | exs | Elixir | priv/repo/migrations/02_create_org_tag.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | 2 | 2017-06-08T23:28:13.000Z | 2017-06-08T23:28:16.000Z | priv/repo/migrations/02_create_org_tag.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | priv/repo/migrations/02_create_org_tag.exs | ench0/blog | 04f7df2357b13dddee9d82cd1c35bbd0ce9618a9 | [
"MIT"
] | null | null | null | defmodule Blog.Repo.Migrations.CreateBlog.Org.Tag do
use Ecto.Migration
def change do
create table(:org_tags) do
add :name, :string
add :slug, :string
add :active, :boolean, default: true, null: false
end
create unique_index(:org_tags, [:name])
create unique_index(:org_tags, [:slug])
end
end
| 19.823529 | 55 | 0.664688 |
ff1f118f9aa901897d345bffbb7c382a0cdf9d0d | 1,030 | ex | Elixir | test/support/conn_case.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | 1 | 2019-10-23T12:17:34.000Z | 2019-10-23T12:17:34.000Z | test/support/conn_case.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | null | null | null | defmodule LouWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
# Code injected into every test module that `use`s this case template.
using do
  quote do
    # Import conveniences for testing with connections
    use Phoenix.ConnTest
    # Route helpers, e.g. Routes.page_path(conn, :index)
    alias LouWeb.Router.Helpers, as: Routes

    # The default endpoint for testing
    @endpoint LouWeb.Endpoint
  end
end
# Checks out a sandboxed DB connection for each test; non-async tests
# share the connection with processes they spawn. Every test receives
# a fresh `conn` in its context.
setup tags do
  :ok = Ecto.Adapters.SQL.Sandbox.checkout(Lou.Repo)

  unless tags[:async] do
    Ecto.Adapters.SQL.Sandbox.mode(Lou.Repo, {:shared, self()})
  end

  {:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 26.410256 | 65 | 0.712621 |
ff1f202e07d54da04339ca618d8a5896b73cf3e7 | 5,641 | exs | Elixir | lib/mix/test/mix/umbrella_test.exs | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/mix/test/mix/umbrella_test.exs | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/umbrella_test.exs | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Mix.UmbrellaTest do
use MixTest.Case
# Compiling at the umbrella root must compile every child app and keep
# each app in the same Mix.env as the umbrella itself.
test "compiles umbrella" do
  in_fixture "umbrella_dep/deps/umbrella", fn ->
    Mix.Project.in_project(:umbrella, ".", fn _ ->
      Mix.Task.run "compile"
      assert_received { :mix_shell, :info, ["==> bar"] }
      assert_received { :mix_shell, :info, ["Compiled lib/bar.ex"] }
      assert_received { :mix_shell, :info, ["Generated bar.app"] }
      assert_received { :mix_shell, :info, ["==> foo"] }
      assert_received { :mix_shell, :info, ["Compiled lib/foo.ex"] }
      assert_received { :mix_shell, :info, ["Generated foo.app"] }

      # Ensure foo was loaded and in the same env as Mix.env
      assert_received { :mix_shell, :info, [":foo env is dev"] }
      assert_received { :mix_shell, :info, [":bar env is dev"] }
    end)
  end
end
# Child apps must be listed as path dependencies of the umbrella, be
# compilable/checkable, and startable without clobbering each other's
# build artifacts.
test "dependencies in umbrella" do
  in_fixture "umbrella_dep/deps/umbrella", fn ->
    Mix.Project.in_project(:umbrella, ".", [build_per_environment: true], fn _ ->
      Mix.Task.run "deps"
      assert_received { :mix_shell, :info, ["* bar (apps/bar)"] }
      assert_received { :mix_shell, :info, ["* foo (apps/foo)"] }

      # Ensure we can compile and run checks
      Mix.Task.run "deps.compile"
      Mix.Task.run "deps.check"

      # Ensure we can also start each app and
      # they won't remove each other build
      Mix.Task.run "compile"
      Mix.Task.clear
      Mix.Task.run "app.start", ["--no-compile"]
    end)
  end
end
# Minimal project fixture: an umbrella with a single path dependency.
defmodule UmbrellaDeps do
  def project do
    [ apps_path: "apps",
      deps: [{ :some_dep, path: "deps/some_dep" }] ]
  end
end

# Running the loadpath tasks must add the ebin directories of both the
# external dep and each umbrella app to the code path.
test "loads umbrella dependencies" do
  Mix.Project.push UmbrellaDeps

  in_fixture "umbrella_dep/deps/umbrella", fn ->
    # Pre-create the ebin dirs so loadpaths has something to add.
    File.mkdir_p!("deps/some_dep/ebin")
    File.mkdir_p!("_build/dev/lib/some_dep/ebin")
    File.mkdir_p!("_build/dev/lib/foo/ebin")
    File.mkdir_p!("_build/dev/lib/bar/ebin")
    Mix.Task.run "deps.loadpaths", ["--no-deps-check"]
    Mix.Task.run "loadpaths", ["--no-elixir-version-check"]
    assert Path.expand('_build/dev/lib/some_dep/ebin') in :code.get_path
    assert Path.expand('_build/dev/lib/foo/ebin') in :code.get_path
    assert Path.expand('_build/dev/lib/bar/ebin') in :code.get_path
  end
end
# Fixture that depends both on an umbrella and on one of that
# umbrella's own apps, creating a dependency cycle.
defmodule CycleDeps do
  def project do
    [ app: :umbrella_dep,
      deps: [
        { :bar, path: "deps/umbrella/apps/bar" },
        { :umbrella, path: "deps/umbrella" }
      ] ]
  end
end

# Dependency resolution must terminate and produce a stable topological
# order despite the cycle above.
test "handles dependencies with cycles" do
  Mix.Project.push CycleDeps

  in_fixture "umbrella_dep", fn ->
    assert Enum.map(Mix.Dep.loaded([]), & &1.app) == [:foo, :bar, :umbrella]
  end
end
# Rewrites the child apps' mix.exs files on the fly so that foo depends
# on bar (in-umbrella) and bar depends on two external path deps, then
# checks the resolved ordering. Both files assert Mix.env == :dev at
# load time to prove child projects inherit the umbrella environment.
test "handles dependencies with cycles and overridden deps" do
  in_fixture "umbrella_dep/deps/umbrella", fn ->
    Mix.Project.in_project :umbrella, ".", fn _ ->
      File.write! "apps/foo/mix.exs", """
      defmodule Foo.Mix do
        use Mix.Project

        def project do
          # Ensure we have the proper environment
          :dev = Mix.env

          [ app: :foo,
            version: "0.1.0",
            deps: [{ :bar, in_umbrella: true }] ]
        end
      end
      """

      File.write! "apps/bar/mix.exs", """
      defmodule Bar.Mix do
        use Mix.Project

        def project do
          # Ensure we have the proper environment
          :dev = Mix.env

          [ app: :bar,
            version: "0.1.0",
            deps: [{ :a, path: "deps/a" },
                   { :b, path: "deps/b" }] ]
        end
      end
      """

      assert Enum.map(Mix.Dep.loaded([]), & &1.app) == [:a, :b, :bar, :foo]
    end
  end
end
# When an umbrella is itself a dependency, `mix deps` must list both the
# umbrella and its child apps.
test "list deps for umbrella as dependency" do
  in_fixture("umbrella_dep", fn ->
    Mix.Project.in_project(:umbrella_dep, ".", fn _ ->
      Mix.Task.run "deps"
      assert_received { :mix_shell, :info, ["* umbrella (deps/umbrella)"] }
      assert_received { :mix_shell, :info, ["* foo (apps/foo)"] }
    end)
  end)
end

# Compiling the umbrella dependency must make its modules callable.
test "compile for umbrella as dependency" do
  in_fixture "umbrella_dep", fn ->
    Mix.Project.in_project(:umbrella_dep, ".", fn _ ->
      Mix.Task.run "deps.compile"
      assert "hello world" == Bar.bar
    end)
  end
end
# Touching a path dependency's compile manifest with a future timestamp
# must force the dependent app to recompile on the next run.
test "recompiles after path dependency changed" do
  in_fixture("umbrella_dep/deps/umbrella/apps", fn ->
    Mix.Project.in_project(:bar, "bar", fn _ ->
      Mix.Tasks.Deps.Compile.run []
      assert Mix.Tasks.Compile.Elixir.run([]) == :ok
      # Second run with nothing changed is a no-op.
      assert Mix.Tasks.Compile.Elixir.run([]) == :noop
      assert_received { :mix_shell, :info, ["Compiled lib/foo.ex"] }
      purge [Bar]

      future = { { 2020, 4, 17 }, { 14, 0, 0 } }
      manifest = "_build/dev/lib/foo/.compile.elixir"
      File.mkdir_p!(Path.dirname(manifest))
      File.touch!(manifest, future)
      assert Mix.Tasks.Compile.Elixir.run([]) == :ok
    end)
  end)
end
# Umbrella fixture that explicitly whitelists the apps to build.
defmodule Selective do
  def project do
    [ apps_path: "apps",
      apps: [:foo, :bar] ]
  end
end

# With an :apps whitelist, the broken "errors" app must be skipped and
# only the listed apps compiled.
test "can select which apps to use" do
  in_fixture("umbrella_dep/deps/umbrella", fn ->
    Mix.Project.push Selective

    File.mkdir_p! "apps/errors/lib"
    File.write! "apps/errors/lib/always_fail.ex", "raise ~s[oops]"

    assert Mix.Task.run("compile.elixir") == [:ok, :ok]
    assert_received { :mix_shell, :info, ["Compiled lib/bar.ex"] }
    assert_received { :mix_shell, :info, ["Compiled lib/foo.ex"] }
  end)
end
end
| 30.491892 | 83 | 0.58128 |
ff1f7655b2dd5e7d3ab5f8464691eb7a6c962fb1 | 6,170 | ex | Elixir | lib/mix/tasks/talon.gen.components.ex | talonframework/ex_admin | d164c065ac74c156c3abeca8437e184456db903c | [
"MIT"
] | 170 | 2017-05-25T15:09:53.000Z | 2021-07-09T04:04:14.000Z | lib/mix/tasks/talon.gen.components.ex | talonframework/ex_admin | d164c065ac74c156c3abeca8437e184456db903c | [
"MIT"
] | 78 | 2017-05-25T09:41:55.000Z | 2019-01-04T20:53:35.000Z | lib/mix/tasks/talon.gen.components.ex | talonframework/ex_admin | d164c065ac74c156c3abeca8437e184456db903c | [
"MIT"
] | 8 | 2017-06-24T12:28:06.000Z | 2018-09-17T16:11:56.000Z | defmodule Mix.Tasks.Talon.Gen.Components do
@moduledoc """
Create the installed components.
mix talon.gen.components theme
This is a supporting mix task that is used by the talon.gen.theme task.
It is the foundation for an optional component installer.
## Options
* --root-path=lib/my_app/talon -- set talon's root path
* --path-prefix="" -- set talon's path prefix
* --dry-run -- print what will be done, but don't create any files
* --verbose -- Print extra information
"""
use Mix.Task
import Mix.Talon
require Talon.Config, as: Config
# list all supported boolean options
@boolean_options ~w(verbose dry_run)a
# complete list of supported options
@switches [
root_path: :string, proj_struct: :string, path_prefix: :string
] ++ Enum.map(@boolean_options, &({&1, :boolean}))
@component_path "priv/templates/talon.gen.components/components/*"
@doc """
The entry point of the mix task.
"""
@spec run(List.t) :: any
def run(args) do
  Mix.shell.info("Running talon.gen.components")
  {opts, parsed, _unknown} = OptionParser.parse(args, switches: @switches)
  # TODO: Add args verification
  # verify_args!(parsed, unknown)
  split_opts = parse_options(opts, parsed)
  config = do_config(split_opts)
  do_run(config)
end
# Log the resolved configuration, then locate and generate every component.
defp do_run(config) do
  log(config, inspect(config), label: "config")
  gen_components(get_components(config))
end
# Collect the bundled component template directories shipped with :talon.
# Returns `{config, [{component_name, component_dir_path}]}` for every
# directory matching @component_path inside the :talon application.
defp get_components(config) do
  components =
    :talon
    |> Application.app_dir(@component_path)
    |> Path.wildcard()
    # Enum.filter_map/3 is deprecated; filter + map is the supported spelling.
    |> Enum.filter(&File.dir?/1)
    |> Enum.map(&{Path.basename(&1), &1})

  {config, components}
end
# Run the generator for each discovered component, threading config through.
defp gen_components({config, components}) do
  Enum.reduce(components, config, fn component, acc ->
    gen_component(acc, component)
  end)
end
# Generate both the view modules and the templates for a single component.
defp gen_component(config, component) do
  config_after_views = gen_component_views(config, component)
  gen_component_templates(config_after_views, component)
end
# Copy a component's view sources into the theme's shared components view dir.
defp gen_component_views(config, {_component, component_path} = comp_info) do
  source = Path.join(component_path, "views")

  target =
    Path.join([config.root_path, "views", config.path_prefix,
               config.concern_path, config.theme_name, "components"])

  gen_files(config, comp_info, source, target)
end
# Copy a component's template sources into its per-component template dir.
defp gen_component_templates(config, {component, component_path} = comp_info) do
  source = Path.join(component_path, "templates")

  target =
    Path.join([config.root_path, "templates", config.path_prefix,
               config.concern_path, config.theme_name, "components", component])

  gen_files(config, comp_info, source, target)
end
# Render every file under `source_path` into `target_path` as an EEx template.
#
# The EEx binding carries theme/concern naming data pulled from `config`.
# Honors `config.dry_run` (print planned copies only) and `config.verbose`
# (echo each copy before writing). Always returns `config` unchanged.
# Fix: the tuple element was bound as `component_path` but never used,
# producing an unused-variable warning — now underscored.
defp gen_files(config, {_, _component_path}, source_path, target_path) do
  file_names =
    source_path
    |> Path.join("*")
    |> Path.wildcard
    |> Enum.map(&Path.basename/1)
  # Kernel.binding() is empty at this point; the keyword list below is the
  # complete binding handed to EEx.
  binding = Kernel.binding() ++ [base: config.base, theme_name: config.theme_name,
    theme_module: config.theme_module, web_namespace: config.web_namespace,
    view_opts: config.view_opts, concern_path: config.concern_path, concern: config.concern]
  infos =
    file_names
    |> Enum.map(& "copy #{source_path}/#{&1} to #{target_path}/#{&1}")
  if config.dry_run do
    Enum.each infos, &(Mix.shell.info("# " <> &1))
  else
    if config.verbose, do: Enum.each(infos, &Mix.shell.info/1)
    File.mkdir_p! target_path
    copy_from source_path, target_path, binding,
      Enum.map(file_names, & {:eex, &1, &1}), config
  end
  config
end
# Build the generator's configuration map from the parsed CLI options.
# `bin_opts` are the boolean flags, `opts` the valued options.
defp do_config({bin_opts, opts, _parsed} = _args) do
  # themes = get_available_themes()
  {concern, theme_name} = process_concern_theme(opts)
  # --target-theme overrides the theme derived from the concern.
  target_name = Keyword.get(opts, :target_theme, theme_name)
  target_module = Inflex.camelize(target_name)
  # Inflect naming variants (base/module/path...) from the app name.
  binding =
    Mix.Project.config
    |> Keyword.fetch!(:app)
    |> Atom.to_string
    |> Mix.Phoenix.inflect
  # NOTE(review): :module and :boilerplate are not in @boolean_options, so
  # bin_opts[:module] / bin_opts[:boilerplate] appear to always be nil here —
  # confirm intent; `|| true` below also makes :boilerplate always truthy.
  base = bin_opts[:module] || binding[:base]
  proj_struct = to_atom(opts[:proj_struct] || detect_project_structure())
  concern_path = concern_path(concern)
  view_opts =
    %{target_name: target_name, base: base, concern: concern,
      concern_path: concern_path}
    |> view_opts(proj_struct)
  app = opts[:app_name] || Mix.Project.config |> Keyword.fetch!(:app)
  app_path_name = app |> to_string |> Inflex.underscore
  # Default root: lib/<app>/<default_root_path> unless --root-path is given.
  root_path = opts[:root_path] || Path.join(["lib", app_path_name, default_root_path()])
  %{
    theme_name: target_name,
    theme_module: target_module,
    verbose: bin_opts[:verbose],
    dry_run: bin_opts[:dry_run],
    binding: binding,
    root_path: root_path,
    path_prefix: opts[:path_prefix] || default_path_prefix(),
    project_structure: proj_struct,
    view_opts: view_opts,
    concern: concern,
    concern_path: concern_path,
    web_namespace: web_namespace(proj_struct),
    boilerplate: bin_opts[:boilerplate] || Config.boilerplate() || true,
    base: base,
  }
end
# Normalize a project-structure value: atoms pass through, strings are interned.
# NOTE(review): String.to_atom/1 creates atoms at runtime; input here is an
# operator-supplied CLI option, so the set is bounded in practice.
defp to_atom(value) when is_atom(value), do: value
defp to_atom(value), do: String.to_atom(value)
# Split CLI options into boolean flags vs valued options, carrying the
# positional args through unchanged.
defp parse_options([], parsed), do: {[], [], parsed}

defp parse_options(opts, parsed) do
  {flags, valued} = Enum.split_with(opts, fn {key, _value} -> key in @boolean_options end)
  {flags, valued, parsed}
end
# Copy each {format, source, target} entry from `source_dir` into `target_dir`.
# :text entries are copied verbatim; :eex entries are rendered with `binding`.
# Raises when a listed source file is missing. When the target already exists
# the user is prompted interactively before it is replaced.
defp copy_from(source_dir, target_dir, binding, mapping, config) when is_list(mapping) do
  # Without --confirm, files are force-created (no Mix.Generator prompt).
  create_opts = if config[:confirm], do: [], else: [force: true]
  for {format, source_file_path, target_file_path} <- mapping do
    source = Path.join(source_dir, source_file_path)
    unless File.exists?(source) do
      raise("could not find #{source_file_path} in any of the sources")
    end
    target = Path.join(target_dir, target_file_path)
    contents =
      case format do
        :text -> File.read!(source)
        :eex -> EEx.eval_file(source, binding)
      end
    # Pipe the yes/no decision into `if`: write only when the target is new
    # or the user approved the overwrite.
    if File.exists? target do
      fname = Path.split(target) |> List.last
      if Mix.shell.yes?("File #{fname} exists. Replace it?"), do: true, else: false
    else
      true
    end
    |> if do
      Mix.Generator.create_file(target, contents, create_opts)
    end
  end
end
end
| 29.951456 | 94 | 0.666775 |
ff1f97b2ef7f7b4794aab7b90cf95663c52eacf5 | 561 | ex | Elixir | lib/mbanking_web/views/changeset_view.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | lib/mbanking_web/views/changeset_view.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | lib/mbanking_web/views/changeset_view.ex | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | defmodule MbankingWeb.ChangesetView do
use MbankingWeb, :view
@doc """
Traverses and translates changeset errors.

See `Ecto.Changeset.traverse_errors/2` and
`MbankingWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
  Ecto.Changeset.traverse_errors(changeset, fn error -> translate_error(error) end)
end
def render("error.json", %{changeset: changeset}) do
  # A changeset encodes its errors as a JSON object, so we expose the
  # translated errors directly under the "errors" key.
  errors = translate_errors(changeset)
  %{errors: errors}
end
end
| 28.05 | 65 | 0.73975 |
ff1ff02ad327957850407c409ec8207ca7861bd8 | 929 | ex | Elixir | lib/canvas/resources/user.ex | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | lib/canvas/resources/user.ex | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | lib/canvas/resources/user.ex | acu-online/canvas | 594e435abe8907097a3a66d25627e96cb940b07a | [
"MIT"
] | null | null | null | defmodule Canvas.Resources.User do
@moduledoc """
Represents an User.
See:
- https://canvas.instructure.com/doc/api/users
- https://canvas.instructure.com/doc/api/users#User
"""
# Typespec mirroring the Canvas API User object. Fields not returned by a
# given endpoint (or hidden by permissions) will simply be nil.
@type t :: %__MODULE__{
        id: integer,
        name: String.t(),
        sortable_name: String.t(),
        short_name: String.t(),
        sis_user_id: String.t(),
        sis_import_id: integer,
        integration_id: String.t(),
        login_id: String.t(),
        avatar_url: String.t(),
        enrollments: [Canvas.Resources.Enrollment.t()],
        email: String.t(),
        locale: String.t(),
        last_login: String.t(),
        time_zone: String.t(),
        bio: String.t()
      }

# All fields default to nil; populated from the Canvas REST payload.
defstruct ~w(id name sortable_name short_name sis_user_id sis_import_id
  integration_id login_id avatar_url enrollments email locale
  last_login time_zone bio)a
end
| 29.03125 | 74 | 0.586652 |
ff201812345f6b83721c010ca486f673954c3477 | 2,355 | exs | Elixir | 09-ping.exs | alexdaube/30-days-of-elixir | a5e12805be61d8e39f1e500fcb99930799368723 | [
"MIT"
] | null | null | null | 09-ping.exs | alexdaube/30-days-of-elixir | a5e12805be61d8e39f1e500fcb99930799368723 | [
"MIT"
] | null | null | null | 09-ping.exs | alexdaube/30-days-of-elixir | a5e12805be61d8e39f1e500fcb99930799368723 | [
"MIT"
] | null | null | null | # NOTE: I just recently tried running this on my Mac OS machine.
# Unfortunately, there seems to be a problem spawning 254 `ping` commands. I'm not sure what to do.
# If you want to see this script run on Mac, change line 54 to 1..100 so you can see something happen. :-/
defmodule Ping do
  @moduledoc """
  Ping a class-C subnet to find hosts that respond.

  To run from the command line:

      $ elixir 09-ping.exs 192.168.1.x
  """

  @doc """
  Ping `ip` asynchronously and report the `{ip, reachable?}` tuple to `parent`.
  """
  def ping_async(ip, parent) do
    send(parent, {ip, ping(ip)})
  end

  @doc """
  Ping a single IP address once; returns true when any reply came back.
  """
  def ping(ip) do
    output = System.cmd(ping_cmd(ip))
    # "100% packet loss" (or "100.0%") means the host never answered.
    not Regex.match?(~r/100(\.0)?% packet loss/, output)
  end

  # Build the ping invocation; macOS spells the timeout flag -W, Linux -w.
  def ping_cmd(ip) do
    timeout_flag = if darwin?, do: '-W', else: '-w'
    "ping -c 1 #{timeout_flag} 5 -s 1 #{ip}"
  end

  # True when running on Mac OS (uname reports "Darwin").
  def darwin? do
    System.cmd("uname") |> String.strip == "Darwin"
  end
end
defmodule Subnet do
  @doc """
  Ping all IPs in a class-C subnet and return a Dict with results.
  """
  def ping(subnet) do
    targets = ips(subnet)
    # One process per host; each reports back via ping_async/2.
    Enum.each targets, fn ip ->
      Process.spawn(Ping, :ping_async, [ip, self])
    end
    wait HashDict.new, Enum.count(targets)
  end

  @doc """
  Given a class-C subnet string like '192.168.1.x', return list of all 254 IPs therein.
  """
  def ips(subnet) do
    prefix = Regex.run(~r/^\d+\.\d+\.\d+\./, subnet) |> Enum.at(0)
    Enum.map(Enum.to_list(1..254), fn i -> "#{prefix}#{i}" end)
  end

  # Collect one {ip, reachable?} message per spawned ping into `dict`,
  # counting down until every host has reported.
  defp wait(dict, 0), do: dict
  defp wait(dict, remaining) do
    receive do
      {ip, reachable} ->
        wait Dict.put(dict, ip, reachable), remaining - 1
    end
  end
end
# Command-line execution support
# TODO is there a way to check if this script is being executed directly (vs imported elsewhere)?
case System.argv do
  [subnet] ->
    # A subnet argument was given: ping all 254 hosts and print the ones
    # that responded, sorted, one per line.
    results = Subnet.ping(subnet)
    Enum.filter_map(results, fn {_ip, exists} -> exists end, fn {ip, _} -> ip end)
    |> Enum.sort
    |> Enum.join("\n")
    |> IO.puts
  _ ->
    # No subnet argument: run the unit tests instead.
    ExUnit.start

    defmodule SubnetTest do
      use ExUnit.Case

      test "ips" do
        ips = Subnet.ips("192.168.1.x")
        assert Enum.count(ips) == 254
        assert Enum.at(ips, 0) == "192.168.1.1"
        assert Enum.at(ips, 253) == "192.168.1.254"
      end
    end
end
| 26.166667 | 106 | 0.614437 |
ff201ddca740a72e0530a2fc8944503dfb979f8e | 1,027 | ex | Elixir | lib/credo/cli/output/formatter/json.ex | cblage/credo | 00d0c4875c293d992a3084d2fa1dee36dc71a235 | [
"MIT"
] | null | null | null | lib/credo/cli/output/formatter/json.ex | cblage/credo | 00d0c4875c293d992a3084d2fa1dee36dc71a235 | [
"MIT"
] | null | null | null | lib/credo/cli/output/formatter/json.ex | cblage/credo | 00d0c4875c293d992a3084d2fa1dee36dc71a235 | [
"MIT"
] | null | null | null | defmodule Credo.CLI.Output.Formatter.JSON do
alias Credo.Issue
alias Credo.CLI.Output.UI
def print_issues(issues) do
%{
"issues" => Enum.map(issues, &to_json/1)
}
|> print_map()
end
def print_map(map) do
UI.puts(JSON.encode!(map))
end
def to_json(
%Issue{
check: check,
category: category,
message: message,
filename: filename,
priority: priority
} = issue
) do
check_name =
check
|> to_string()
|> String.replace(~r/^(Elixir\.)/, "")
column_end =
if issue.column && issue.trigger do
issue.column + String.length(to_string(issue.trigger))
end
%{
"check" => check_name,
"category" => to_string(category),
"filename" => to_string(filename),
"line_no" => issue.line_no,
"column" => issue.column,
"column_end" => column_end,
"trigger" => issue.trigger,
"message" => message,
"priority" => priority
}
end
end
| 21.395833 | 62 | 0.557936 |
ff2022ab9a197a668c45c4ca8823057d5e1e688c | 1,044 | exs | Elixir | mix.exs | thiamsantos/spandex_redix | 4bceef03410b0282a780ad602910fdebc2a9ff66 | [
"Apache-2.0"
] | 1 | 2020-02-15T23:17:51.000Z | 2020-02-15T23:17:51.000Z | mix.exs | thiamsantos/spandex_redix | 4bceef03410b0282a780ad602910fdebc2a9ff66 | [
"Apache-2.0"
] | null | null | null | mix.exs | thiamsantos/spandex_redix | 4bceef03410b0282a780ad602910fdebc2a9ff66 | [
"Apache-2.0"
] | null | null | null | defmodule SpandexRedix.MixProject do
use Mix.Project
def project do
[
app: :spandex_redix,
version: "0.1.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: "Tracing integration between redix and spandex",
package: package(),
name: "SpandexRedix",
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
maintainers: ["Thiago Santos"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/thiamsantos/spandex_redix"}
]
end
defp docs do
[
main: "SpandexRedix",
source_url: "https://github.com/thiamsantos/spandex_redix"
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:spandex, "~> 2.2", optional: true},
{:mox, "~> 0.5", only: :test},
{:ex_doc, "~> 0.21.3", only: :dev, runtime: false}
]
end
end
| 21.306122 | 74 | 0.576628 |
ff202833fd1f7f1d373c127c442be9c4ff8fab9a | 61,169 | ex | Elixir | lib/simple_pool/v1/monitoring_framework/environment_manager/pool.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | lib/simple_pool/v1/monitoring_framework/environment_manager/pool.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | lib/simple_pool/v1/monitoring_framework/environment_manager/pool.ex | noizu/simple_pool | 59251a3391ff82152a31626072955b95f83c18ee | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Author: Keith Brings
# Copyright (C) 2018 Noizu Labs, Inc. All rights reserved.
#-------------------------------------------------------------------------------
defmodule Noizu.MonitoringFramework.EnvironmentPool do
#alias Noizu.Scaffolding.CallingContext
use Noizu.SimplePool.Behaviour,
default_modules: [:pool_supervisor, :worker_supervisor],
worker_state_entity: Noizu.MonitoringFramework.EnvironmentWorkerEntity,
verbose: false
use Amnesia
use Noizu.SimplePool.Database.MonitoringFramework.NodeTable
defmodule Worker do
  @vsn 1.0
  # Thin worker shell: all per-environment state and behaviour is delegated
  # to EnvironmentWorkerEntity through the WorkerBehaviour macro.
  use Noizu.SimplePool.WorkerBehaviour,
    worker_state_entity: Noizu.MonitoringFramework.EnvironmentWorkerEntity,
    verbose: false
  require Logger
end # end worker
#=============================================================================
# @Server
#=============================================================================
defmodule Server do
@doc """
Responsible for system wide management of services, worker migration and other critical tasks.
"""
@vsn 1.0
#@monitor_ms 60 * 5 * 1000
@master_node_cache_key :"noizu:active_master_node"
alias Noizu.SimplePool.Server.State
alias Noizu.SimplePool.Server.EnvironmentDetails
use Noizu.SimplePool.ServerBehaviour,
worker_state_entity: Noizu.MonitoringFramework.EnvironmentWorkerEntity,
override: [:init]
@behaviour Noizu.SimplePool.MonitoringFramework.MonitorBehaviour
#================================================
# LifeCycle Methods
#================================================
#--------------------------------------
#
#--------------------------------------
# GenServer init: builds the server State from the supplied service
# `definition`, marks the environment :offline, and enables the server.
def init([_sup, definition, context] = _args) do
  if verbose() do
    Logger.info(fn -> {base().banner("INIT #{__MODULE__} (#{inspect Noizu.MonitoringFramework.EnvironmentPool.WorkerSupervisor}@#{inspect self()})"), Noizu.ElixirCore.CallingContext.metadata(context) } end)
  end
  # @TODO load real effective PRI-1
  # No persisted effective state is loaded yet, so fall back to the
  # definition's initial server_options below.
  effective = nil
  Logger.info fn() -> {"INIT #{inspect definition}", Noizu.ElixirCore.CallingContext.metadata(context)} end
  state = %State{
    worker_supervisor: Noizu.MonitoringFramework.EnvironmentPool.WorkerSupervisor, # @TODO should be worker_supervisor
    service: Noizu.MonitoringFramework.EnvironmentPool.Server, # @TODO should be service
    status_details: :pending,
    extended: %{monitors: %{}, watchers: %{}},
    options: option_settings(),
    environment_details: %EnvironmentDetails{
      server: node(),
      definition: definition,
      # Both initial and effective fall back to %{} when definition or its
      # server_options are missing.
      initial: definition && definition.server_options && definition.server_options.initial || %{},
      effective: effective || definition && definition.server_options && definition.server_options.initial || %{},
      default: nil,
      status: :offline,
      monitors: %{}
    }
  }
  enable_server!()
  {:ok, state}
end
#================================================
# Convenience Methods
#================================================
#--------------------------------------
#
#--------------------------------------
@doc """
Register this node's initial service definition with the environment manager.
Synchronous; waits up to 60 seconds for the server to acknowledge.
"""
def register(initial, context, options \\ %{}) do
  GenServer.call(__MODULE__, {:m, {:register, initial, options}, context}, 60_000)
end
#--------------------------------------
#
#--------------------------------------
@doc """
Asynchronously ask the environment manager to start the registered services.
"""
def start_services(context, options \\ %{}) do
  GenServer.cast(__MODULE__, {:m, {:start_services, options}, context})
end
#--------------------------------------
#
#--------------------------------------
@doc """
Fire-and-forget request to recalculate routing hints on the local server.
"""
def update_hints!(context, options \\ %{}) do
  internal_system_cast({:update_hints, options}, context)
end
#--------------------------------------
#
#--------------------------------------
@doc """
Push hint updates for the given components.

`:all` and `MapSet` arguments trigger a local prep pass; any other argument is
treated as an effective-state record and the update is forwarded to the master
node derived from it. Clause order matters: the `MapSet` clause must precede
the catch-all.
"""
def internal_update_hints(components, context, options \\ %{})

# Recompute hints for every known component.
def internal_update_hints(:all, context, options) do
  internal_system_cast({:prep_hint_update, :all, options}, context, options)
end

# Recompute hints for an explicit set of components.
def internal_update_hints(%MapSet{} = components, context, options) do
  internal_system_cast({:prep_hint_update, components, options}, context, options)
end

# Anything else is an effective-state record; route the update to its master.
def internal_update_hints(effective, context, options) do
  remote_system_cast(master_node(effective), {:hint_update, effective, options}, context, options)
end
#--------------------------------------
#
#--------------------------------------
# Fetch the manager's internal state from a remote `server` via rpc
# (delegates to the two-arity clause on that node)...
def fetch_internal_state(server, context, options) do
  :rpc.call(server, __MODULE__, :fetch_internal_state, [context, options])
end

# ...or fetch it from the local server process.
def fetch_internal_state(context, options) do
  internal_system_call({:fetch_internal_state, options}, context, options)
end
#--------------------------------------
#
#--------------------------------------
# Replace the manager's internal state on a remote `server` via rpc
# (delegates to the three-arity clause on that node)...
def set_internal_state(server, state, context, options) do
  :rpc.call(server, __MODULE__, :set_internal_state, [state, context, options])
end

# ...or replace it on the local server process.
def set_internal_state(state, context, options) do
  internal_system_call({:set_internal_state, state, options}, context, options)
end
#--------------------------------------
#
#--------------------------------------
@doc """
Promote `master_node` as the cluster's environment master.

Refreshes the local FastGlobal cache, persists the setting to the
SettingTable, pushes the cached value to all connected nodes, then casts a
state update to every server. Returns `master_node`.
"""
def update_master_node(master_node, context, options) do
  spawn fn ->
    # Clear FastGlobal Cache
    FastGlobal.put(@master_node_cache_key, master_node)
    # Update Database Entry
    Noizu.SimplePool.Database.MonitoringFramework.SettingTable.delete!(:environment_master)
    %Noizu.SimplePool.Database.MonitoringFramework.SettingTable{setting: :environment_master, value: master_node}
    |> Noizu.SimplePool.Database.MonitoringFramework.SettingTable.write!()

    # Fix: Task.async_stream/2 is lazy — without forcing the stream the rpc
    # casts were never executed. Stream.run/1 drives it to completion.
    Node.list()
    |> Task.async_stream(&(:rpc.cast(&1, FastGlobal, :put, [@master_node_cache_key, master_node])))
    |> Stream.run()
  end

  # Call servers to update state entities
  call = {:update_master_node, master_node, options}
  internal_system_cast(call, context, options)

  spawn fn ->
    # Same laziness fix as above: force the async stream so the casts fire.
    Node.list()
    |> Task.async_stream(&( :rpc.cast(&1, __MODULE__, :internal_system_cast, [call, context, options])))
    |> Stream.run()
  end

  master_node
end
#============================================================================
# Noizu.SimplePool.MonitoringFramework.MonitorBehaviour Implementation
#============================================================================
#--------------------------------------
#
#--------------------------------------
@doc """
Returns :ack when `server` currently exposes `component` in a usable status
(:online, :degraded, :critical — or any status for system calls), :nack
otherwise or when the server/service is unknown.
"""
def supports_service?(server, component, _context, options \\ %{}) do
  case Noizu.SimplePool.Database.MonitoringFramework.NodeTable.read!(server) do
    nil ->
      :nack
    record ->
      service = record.entity.services[component]
      cond do
        !service -> :nack
        service.status in [:online, :degraded, :critical] -> :ack
        options[:system_call] -> :ack
        true -> :nack
      end
  end
end
#--------------------------------------
#
#--------------------------------------
# Profiled rebalance of workers for `component_set`, pulling from
# `input_server_list` and redistributing across `output_server_list`.
# Returns {:ack, {dispatch_results, profile_data}}.
# NOTE(review): the misspelling "optomized" is part of the public API and
# cannot be renamed without breaking callers.
def optomized_rebalance(input_server_list, output_server_list, component_set, context, options \\ %{}) do
  pfs = profile_start(%{}, :optomized_rebalance)
  #--------------------
  # 1. Data Setup
  #--------------------
  pfs = profile_start(pfs, :data_setup)
  await_timeout = options[:wait] || 60_000
  bulk_await_timeout = options[:bulk_wait] || 1_000_000
  # Services
  component_list = MapSet.to_list(component_set)
  service_list = Enum.map(component_list, fn(c) -> Module.concat([c, "Server"]) end)
  # Servers
  pool = Enum.uniq(input_server_list ++ output_server_list)
  pfs = profile_end(pfs, :data_setup)
  #------------------------------------------------------
  # 2. Asynchronously grab Server rules and worker lists.
  #------------------------------------------------------
  pfs = profile_start(pfs, :fetch_worker_list)
  htasks = Task.async_stream(pool, fn(server) -> {server, server_health_check!(server, context, options)} end, timeout: await_timeout)
  wtasks = pool
  |> Enum.map(fn(server) -> Enum.map(service_list, fn(service) -> {server, service} end) end)
  |> List.flatten()
  |> Task.async_stream(fn({server, service}) -> {server, {service, service.workers!(server, context)}} end, timeout: await_timeout)
  # Partition health responses into {ok-map, error-map}; errors are collected
  # but currently unused (underscore binding).
  {server_health, _server_health_errors} = Enum.reduce(htasks, {%{}, %{}}, fn(outcome, {acc, ecc}) ->
    case outcome do
      {:ok, {server, {:ack, h}}} -> {put_in(acc, [server], h), ecc}
      {:ok, {server, error}} -> {acc, put_in(ecc, [server], error)}
      e -> {acc, update_in(ecc, [:unknown], &((&1 || []) ++ [e]))}
    end
  end)
  # Keyed by {server, service}: worker list plus its length (allocation).
  {service_workers, _service_workers_errors, _i} = Enum.reduce(wtasks, {%{}, %{}, 0}, fn(outcome, {acc, ecc, i}) ->
    case outcome do
      {:ok, {server, {service, {:ack, workers}}}} -> {put_in(acc, [{server, service}], %{workers: workers, allocation: length(workers)}), ecc, i + 1}
      {:ok, {server, {service, error}}} -> {acc, put_in(ecc, [{server, service}], error), i + 1}
      e -> {acc, update_in(ecc, [:unknown], &((&1 || []) ++ [{i, e}])), i + 1}
    end
  end)
  pfs = profile_end(pfs, :fetch_worker_list, %{info: 30_000, warn: 60_000, error: 90_000})
  #------------------------------------------------------------------
  # 3. Prepare per server.service allocation and target details.
  #------------------------------------------------------------------
  pfs = profile_start(pfs, :service_allocation)
  component_service_map = Enum.reduce(component_list, %{}, fn(x, acc) -> put_in(acc, [x], Module.concat([x, "Server"])) end)
  # Per-server capacity rules: target / soft / hard limits from health checks.
  per_server_targets = Enum.reduce(server_health, %{}, fn({server, rules}, acc) ->
    case rules do
      %Noizu.SimplePool.MonitoringFramework.Server.HealthCheck{} ->
        server_targets = Enum.reduce(component_list, %{}, fn(component, sa) ->
          case rules.services[component] do
            sr = %Noizu.SimplePool.MonitoringFramework.Service.HealthCheck{} ->
              # @TODO refactor out need to do this. pri2
              put_in(sa, [component_service_map[component]], %{target: sr.definition.target, soft: sr.definition.soft_limit, hard: sr.definition.hard_limit})
            _ -> sa
          end
        end)
        put_in(acc, [server], server_targets)
      _ -> acc
    end
  end)
  # 5. Calculate target allocation
  service_allocation = Enum.reduce(Map.keys(service_workers), %{}, fn({server, service} = k, acc) ->
    v = service_workers[k].allocation
    update_in(acc, [server], fn(p) -> p && put_in(p, [service], v) || %{service => v} end)
  end)
  {_outcome, target_allocation} = optimize_balance(input_server_list, output_server_list, service_list, per_server_targets, service_allocation)
  # include all services for any servers not in target allocation
  pull_servers = input_server_list -- output_server_list
  # NOTE(review): `Enum.map([pull_servers], ...)` iterates over a single-element
  # list containing the *list* of servers, so the {server, service} lookup is
  # always nil — likely intended `Enum.map(pull_servers, ...)`; confirm.
  unallocated = Enum.reduce(service_list, %{}, fn(service, acc) -> put_in(acc, [service], Enum.map([pull_servers], fn(server) -> service_workers[{server, service}][:workers] || [] end) |> List.flatten()) end)
  # first pass, strip overages into general pull
  {additional_unallocated, target_allocation} = Enum.reduce(target_allocation, {%{}, target_allocation}, fn({server, v}, {u, wa}) ->
    Enum.reduce(v, {u, wa}, fn({service, target}, {u2, wa2}) ->
      # 1. Grab any currently allocated.
      allocation = service_workers[{server, service}][:allocation]
      cond do
        allocation == nil -> {u2, wa2}
        allocation < target ->
          {u2, put_in(wa2, [server, service], (target - allocation))}
        true ->
          {_l, r} = Enum.split(service_workers[{server, service}][:workers], target)
          m_u2 = update_in(u2, [service], &((&1 || []) ++ r))
          m_wa2 = put_in(wa2, [server, service], 0)
          {m_u2, m_wa2}
      end
    end)
  end)
  # second pass -> assign
  # Hand the pooled unallocated workers out to servers with remaining demand.
  final_allocation = Enum.reduce(service_list, %{}, fn(service, acc) ->
    su = (unallocated[service] || []) ++ (additional_unallocated[service] || [])
    {_u, uacc} = Enum.reduce(target_allocation, {su, acc}, fn({server, v}, {u, wa}) ->
      cond do
        u == nil || u == [] -> {u, wa}
        v[service] > 0 ->
          {l, r} = Enum.split(u, v[service])
          {r, put_in(wa, [{server, service}], l)}
        true -> {u, wa}
      end
    end)
    uacc
  end)
  pfs = profile_end(pfs, :service_allocation, %{info: 30_000, warn: 60_000, error: 90_000})
  #------------
  # Dispatch
  #----------------
  pfs = profile_start(pfs, :dispatch)
  # @TODO third pass - group by origin server.service
  # Regroup the plan by each worker's current host so each origin server gets
  # a single bulk-migrate request.
  broadcast_grouping = Enum.reduce(final_allocation, %{}, fn({{server, service}, workers}, acc) ->
    Enum.reduce(workers, acc, fn(worker, acc) ->
      cond do
        acc[worker.server][service][server] -> update_in(acc, [worker.server, service, server], &(&1 ++ [worker.identifier]))
        acc[worker.server][service] -> put_in(acc, [worker.server, service, server], [worker.identifier])
        acc[worker.server] -> put_in(acc, [worker.server, service], %{server => [worker.identifier]})
        true -> put_in(acc, [worker.server], %{service => %{server => [worker.identifier]}})
      end
    end)
  end)
  tasks = Task.async_stream(broadcast_grouping, fn({server, services}) -> {server, :rpc.call(server, __MODULE__, :server_bulk_migrate!, [services, context, options], bulk_await_timeout)} end, timeout: bulk_await_timeout)
  # Enum.map forces the lazy async stream, collecting dispatch results.
  r = Enum.map(tasks, &(&1))
  #r = []
  pfs = profile_end(pfs, :dispatch, %{info: 30_000, warn: 60_000, error: 90_000})
  pfs = profile_end(pfs, :optomized_rebalance, %{info: 30_000, warn: 60_000, error: 90_000})
  {:ack, {r, pfs}}
end
#--------------------------------------
#
#--------------------------------------
# Rebalance workers for `component_set` from `input_server_list` across
# `output_server_list`. Non-profiled predecessor of optomized_rebalance/5.
# Returns {:ack, results-per-service}.
def rebalance(input_server_list, output_server_list, component_set, context, options \\ %{}) do
  # 1. Data Setup
  await_timeout = options[:wait] || 60_000
  bulk_await_timeout = options[:bulk_wait] || 1_000_000
  # Services
  cl = MapSet.to_list(component_set)
  service_list = Enum.reduce(cl, [], fn(c, acc) -> acc ++ [Module.concat([c, "Server"])] end)
  # Servers
  pool = Enum.uniq(input_server_list ++ output_server_list)
  # 2. Asynchronously grab Server rules and worker lists.
  htasks = Enum.reduce(pool, [], fn(server, acc) -> acc ++ [Task.async(fn -> {server, server_health_check!(server, context, options)} end)] end)
  wtasks = Enum.reduce(pool, [], fn(server, acc) -> Enum.reduce(service_list, acc, fn(service, a2) -> a2 ++ [Task.async(fn -> {server, {service, service.workers!(server, context)}} end)] end) end)
  # server => health-check struct (or the raw error tuple on failure).
  server_health = Enum.reduce(htasks, %{}, fn(task, acc) ->
    case Task.await(task, await_timeout) do
      {server, {:ack, h}} -> put_in(acc, [server], h)
      {server, error} -> put_in(acc, [server], error)
      _ -> acc
    end
  end)
  # server => %{service => worker list (or error)}.
  service_workers = Enum.reduce(wtasks, %{}, fn(task, acc) ->
    t = Task.await(task, await_timeout)
    case t do
      {server, {service, {:ack, workers}}} ->
        update_in(acc, [server], &( &1 && put_in(&1, [service], workers) || %{service => workers}))
      {server, {service, error}} -> update_in(acc, [server], &( &1 && put_in(&1, [service], error) || %{service => error}))
      _ -> acc
    end
  end)
  # 3. Prepare per server.service allocation and target details.
  service_allocation = Enum.reduce(service_workers, %{}, fn({k,v}, acc) ->
    acc = put_in(acc, [k], %{})
    Enum.reduce(v, acc, fn({k2, v2}, a2) -> put_in(a2, [k, k2], length(v2)) end)
  end)
  # 4. Calculate total available Target, Soft and Hard Limit for each service.
  per_server_targets = Enum.reduce(cl, %{}, fn(component, acc) ->
    Enum.reduce(server_health, acc, fn({server, rules}, a2) ->
      case rules do
        %Noizu.SimplePool.MonitoringFramework.Server.HealthCheck{} ->
          case rules.services[component] do
            sr = %Noizu.SimplePool.MonitoringFramework.Service.HealthCheck{} ->
              # @TODO refactor out need to do this. pri2
              service = Module.concat([component, "Server"])
              a2
              |> update_in([server], &(&1 || %{}))
              |> put_in([server, service], %{target: sr.definition.target, soft: sr.definition.soft_limit, hard: sr.definition.hard_limit})
            _ -> a2
          end
        _ -> a2
      end
    end)
  end)
  # Debug dump; intentionally disabled.
  if false do
    IO.puts """
    cl = #{inspect cl, pretty: true, limit: :infinity}
    -----------------
    services = #{inspect service_list, pretty: true, limit: :infinity}
    -----------------
    server_health = #{inspect server_health, pretty: true, limit: 15}
    -----------------
    service_workers = #{inspect service_workers, pretty: true}
    -------------
    service_allocation = #{inspect service_allocation, pretty: true, limit: :infinity}
    --------------
    per_server_targets = #{inspect per_server_targets, pretty: true, limit: :infinity}
    """
  end
  # 5. Calculate target allocation
  {outcome, target_allocation} = optimize_balance(input_server_list, output_server_list, service_list, per_server_targets, service_allocation)
  # Debug dump; intentionally disabled.
  if false do
    IO.puts """
    ---------------------
    outcome = #{inspect outcome}
    ---------------------------
    target_allocation = #{inspect target_allocation}
    """
  end
  # include all services for any servers not in target allocation
  unallocated = Enum.reduce(pool, %{}, fn(server, acc) ->
    Enum.reduce(service_list, acc, fn(service, acc) ->
      cond do
        target_allocation[server] == nil && service_workers[server][service] -> update_in(acc, [service], &((&1 || []) ++ service_workers[server][service]))
        true -> acc
      end
    end)
  end)
  # first pass, strip overages into general pull
  {unallocated, target_allocation} = Enum.reduce(target_allocation, {unallocated, target_allocation}, fn({server, v}, {u, wa}) ->
    Enum.reduce(v, {u, wa}, fn({service, target}, {u2, wa2}) ->
      # 1. Grab any currently allocated.
      case service_workers[server][service] do
        nil -> {u2, wa2}
        workers ->
          {l, r} = Enum.split(workers, target)
          m_u2 = update_in(u2, [service], &((&1 || []) ++ r))
          m_wa2 = put_in(wa2, [server, service], (target - length(l)))
          {m_u2, m_wa2}
      end
    end)
  end)
  # second pass -> assign
  # server => %{service => workers to migrate onto that server}.
  {_unallocated, final_allocation} = Enum.reduce(target_allocation, {unallocated, %{}}, fn({server, v}, {u, wa}) ->
    Enum.reduce(v, {u, wa}, fn({service, target}, {u2, wa2}) ->
      case u2[service] do
        nil -> {u2, wa2}
        workers ->
          {l, r} = Enum.split(workers, target)
          m_u2 = put_in(u2, [service], r)
          m_wa2 = wa2
          |> update_in([server], &(&1 || %{}))
          |> put_in([server, service], l)
          {m_u2, m_wa2}
      end
    end)
  end)
  # @TODO third pass - group by origin server.service
  # Regroup by each worker's current host so each origin server receives one
  # bulk-migrate request.
  broadcast_grouping = Enum.reduce(final_allocation, %{}, fn({server, v}, acc) ->
    Enum.reduce(v, acc, fn({service, workers}, a2) ->
      Enum.reduce(workers, a2, fn(dispatch, a3) ->
        a3
        |> update_in([dispatch.server], &(&1 || %{}))
        |> update_in([dispatch.server, service], &(&1 || %{}))
        |> update_in([dispatch.server, service, server], &((&1 || []) ++ [dispatch.identifier]))
      end)
    end)
  end)
  tasks = Enum.reduce(broadcast_grouping, [], fn({server, services}, acc) ->
    acc ++ [Task.async(fn -> {server, :rpc.call(server, __MODULE__, :server_bulk_migrate!, [services, context, options], bulk_await_timeout)} end)]
  end)
  # NOTE(review): the reduce fn below only matches {_k, {:ack, v}} and will
  # crash on non-ack entries; `:return_worrkers` is a misspelled option key —
  # callers must use the same spelling. Confirm before "fixing" either.
  {:ack, Enum.reduce(tasks, %{}, fn(task, acc) ->
    case Task.await(task, await_timeout) do
      {service, {:ack, outcome}} ->
        o = Enum.reduce(outcome, [],
          fn({_k, {:ack, v}} = _x ,a) ->
            cond do
              options[:return_worrkers] ->
                process_map = for {server, v2} <- v do
                  for e <- v2 do
                    case e do
                      {:ok, {ref, {:ack, pid}}} -> [{server, ref, pid}]
                      _ -> {server, :error}
                    end
                  end
                end |> List.flatten
                a ++ process_map
              true -> a ++ Map.keys(v)
            end
          end)
        put_in(acc, [service], o)
      _ -> acc
    end
  end)
  }
end
#--------------------------------------
#
#--------------------------------------
@doc """
Run bulk migrations for every `{service, transfer_servers}` pair concurrently
on this node, returning {:ack, %{service => migration outcome}}.
"""
def server_bulk_migrate!(services, context, options) do
  await_timeout = options[:wait] || 1_000_000

  tasks =
    Enum.map(services, fn {service, transfer_servers} ->
      Task.async(fn -> {service, service.bulk_migrate!(transfer_servers, context, options)} end)
    end)

  outcome =
    Enum.reduce(tasks, %{}, fn task, acc ->
      {service, result} = Task.await(task, await_timeout)
      Map.put(acc, service, result)
    end)

  {:ack, outcome}
end
#--------------------------------------
#
#--------------------------------------
def optimize_balance(input_server_list, output_server_list, service_list, per_server_targets, service_allocation) do
input_server_set = MapSet.new(input_server_list)
output_server_set = MapSet.new(output_server_list)
# 1. Calculate unallocated
{unallocated, service_allocation} = Enum.reduce(service_allocation, {%{}, service_allocation}, fn ({server, _services}, {u,sa}) ->
cond do
!MapSet.member?(input_server_set, server) && MapSet.member?(output_server_set, server) -> {u, sa}
true ->
{w, m_sa} = pop_in(sa, [server])
m_u = Enum.reduce(w, u, fn({service, worker_count}, acc) -> update_in(acc, [service], &((&1 || 0) + worker_count)) end)
{m_u, m_sa}
end
end)
# 2. Fill up to target
{unallocated, service_allocation} = fill_to({unallocated, service_allocation}, :target, output_server_list, service_list, per_server_targets)
r = total_unallocated(unallocated)
cond do
r == 0 -> {:target, service_allocation}
true -> # 3. Fill up to soft_limit
{unallocated, service_allocation} = fill_to({unallocated, service_allocation}, :soft_limit, output_server_list, service_list, per_server_targets)
r2 = total_unallocated(unallocated)
cond do
r2 == 0 -> {:soft_limit, service_allocation}
true -> # 3. Fill up to hard_limit
{unallocated, service_allocation} = fill_to({unallocated, service_allocation}, :hard_limit, output_server_list, service_list, per_server_targets)
r3 = total_unallocated(unallocated)
cond do
r3 == 0 -> {:hard_limit, service_allocation}
true -> # 4. overflow!
{unallocated, service_allocation} = fill_to({unallocated, service_allocation}, :overflow, output_server_list, service_list, per_server_targets)
r4 = total_unallocated(unallocated)
cond do
r4 < 0 -> {{:error, :critical_bug}, service_allocation}
r4 == 0 -> {:overflow, service_allocation}
r4 > 0 ->
# @TODO add check for unassignable services higher in logic. (we can check as soon as we output server health checks)
{{:error, :unassignable_services}, service_allocation}
end
end
end
end
end
#--------------------------------------
#
#--------------------------------------
def lock_server(context), do: lock_servers([node()], :all, context, %{})
def lock_server(context, options), do: lock_servers([node()], :all, context, options)
def lock_server(components, context, options), do: lock_servers([node()], components, context, options)
def lock_server(server, components, context, options), do: lock_servers([server], components, context, options)
def lock_servers(servers, components, context, options \\ %{}) do
await_timeout = options[:wait] || 60_000
options_b = Map.has_key?(options, :update_hints) && options || put_in(options, [:update_hints], false)
locks = for server <- servers, do: Task.async(fn -> remote_call(server, {:lock_server, components, options_b}, context, options_b) end)
for lock <- locks, do: Task.await(lock, await_timeout)
if Map.get(options, :update_hints, true), do: internal_update_hints(components, context, options)
:ack
end
#--------------------------------------
#
#--------------------------------------
def release_server(context), do: release_servers([node()], :all, context, %{})
def release_server(context, options), do: release_servers([node()], :all, context, options)
def release_server(components, context, options), do: release_servers([node()], components, context, options)
def release_server(server, components, context, options), do: release_servers([server], components, context, options)
def release_servers(servers, components, context, options \\ %{}) do
await_timeout = options[:wait] || 60_000
options_b = Map.has_key?(options, :update_hints) && options || put_in(options, [:update_hints], false)
locks = for server <- servers, do: Task.async(fn -> remote_call(server, {:release_server, components, options}, context, options_b) end)
for lock <- locks, do: Task.await(lock, await_timeout)
if Map.get(options, :update_hints, true), do: internal_update_hints(components, context, options)
:ack
end
#--------------------------------------
#
#--------------------------------------
def select_host(_ref, component, _context, options \\ %{}) do
case Noizu.SimplePool.Database.MonitoringFramework.Service.HintTable.read!(component) do
nil -> {:nack, :hint_required}
v ->
if v.status === %{} do
{:nack, :none_available}
else
if Enum.empty?(v.hint) do
{:nack, :none_available}
else
if options[:sticky] do
n = case options[:sticky] do
true -> node()
v -> v
end
case Enum.find(v.hint, fn({{host, _service}, _v}) -> host == n end) do
{{host, _service}, _v} -> {:ack, host}
_ ->
{{host, _service}, _v} = Enum.random(v.hint)
{:ack, host}
end
else
# TODO handle no hints, illegal format, etc.
{{host, _service}, _v} = Enum.random(v.hint)
{:ack, host}
end
end
end
end
end
#--------------------------------------
#
#--------------------------------------
def record_server_event!(server, event, details, _context, options \\ %{}) do
time = options[:time] || DateTime.utc_now()
entity = %Noizu.SimplePool.MonitoringFramework.LifeCycleEvent{
identifier: event,
time_stamp: time,
details: details
}
%Noizu.SimplePool.Database.MonitoringFramework.Node.EventTable{identifier: server, event: event, time_stamp: DateTime.to_unix(time), entity: entity}
|> Noizu.SimplePool.Database.MonitoringFramework.Node.EventTable.write!()
:ack
end
#--------------------------------------
#
#--------------------------------------
def record_service_event!(server, service, event, details, _context, options \\ %{}) do
time = options[:time] || DateTime.utc_now()
entity = %Noizu.SimplePool.MonitoringFramework.LifeCycleEvent{
identifier: event,
time_stamp: time,
details: details
}
%Noizu.SimplePool.Database.MonitoringFramework.Service.EventTable{identifier: {server, service}, event: event, time_stamp: DateTime.to_unix(time), entity: entity}
|> Noizu.SimplePool.Database.MonitoringFramework.Service.EventTable.write!()
:ack
end
#--------------------------------------
#
#--------------------------------------
def update_effective(state, context, options) do
case state.environment_details && state.environment_details.effective do
%Noizu.SimplePool.MonitoringFramework.Server.HealthCheck{} ->
await_timeout = options[:wait] || 60_000
tasks = Enum.reduce(state.environment_details.effective.services, [], fn({k,v}, acc) ->
acc ++ [Task.async( fn ->
h = v.definition.service.service_health_check!(options[:health_check_options] || %{}, context, options)
{k,h} end)]
end)
effective = Enum.reduce(tasks, state.environment_details.effective, fn(t, acc) ->
{k, v} = Task.await(t, await_timeout)
put_in(acc, [Access.key(:services), k], v)
end)
state = put_in(state, [Access.key(:environment_details), Access.key(:effective)], effective)
state
_ ->
Logger.error "#{__MODULE__}.update_effective Malformed State: #{inspect state, pretty: true, limit: :infinity}\n", Noizu.ElixirCore.CallingContext.metadata(context)
state
end
end
#--------------------------------------
#
#--------------------------------------
  # Runs a health check on a remote `server` by invoking server_health_check!/2 there.
  # NOTE(review): no :rpc timeout is supplied, so this blocks indefinitely if the
  # remote node hangs — callers appear to guard with Task.await; confirm intended.
  def server_health_check!(server, context, options) do
    :rpc.call(server, __MODULE__, :server_health_check!, [context, options])
  end
#--------------------------------------
#
#--------------------------------------
  # Local variant: asks this node's monitor process for a fresh node health check.
  # options[:node_health_check!] is forwarded as the health-check options map.
  def server_health_check!(context, options) do
    internal_system_call({:node_health_check!, options[:node_health_check!] || %{}}, context, options)
  end
#--------------------------------------
#
#--------------------------------------
  # Alias of server_health_check!/2 kept for naming symmetry with node-centric callers.
  def node_health_check!(context, options) do
    server_health_check!(context, options)
  end
#================================================
# Call Handlers
#================================================
#--------------------------------------
#
#--------------------------------------
  # Internal cast: recompute routing hints from this node's effective services.
  def handle_cast({:i, {:update_hints, options}, context}, state) do
    internal_update_hints(state.environment_details.effective, context, options)
    {:noreply, state}
  end
#--------------------------------------
#
#--------------------------------------
  # Internal cast: forward a hint-update request to the master node,
  # which owns hint recalculation for the cluster.
  def handle_cast({:i, {:prep_hint_update, components, options}, context}, state) do
    remote_system_cast(master_node(state), {:hint_update, components, options}, context)
    {:noreply, state}
  end
#--------------------------------------
#
#--------------------------------------
  # Internal cast (master): recompute hints for `components`.
  # perform_hint_update/4 returns the {:noreply, state} tuple itself.
  def handle_cast({:i, {:hint_update, components, options}, context}, state) do
    perform_hint_update(state, components, context, options)
  end
#--------------------------------------
# perform_join | Noizu.SimplePool.MonitoringFramework.MonitorBehaviour Implementation
#--------------------------------------
def perform_join(state, server, {pid, _ref}, initial, _context, options) do
effective = cond do
options[:update_node] -> initial
true ->
case Noizu.SimplePool.Database.MonitoringFramework.NodeTable.read!(server) do
nil -> initial
v = %Noizu.SimplePool.Database.MonitoringFramework.NodeTable{} -> v.entity
end
end
effective = put_in(effective, [Access.key(:master_node)], node())
_s = %Noizu.SimplePool.Database.MonitoringFramework.NodeTable{
identifier: effective.identifier,
status: effective.status,
directive: effective.directive,
health_index: effective.health_index,
entity: effective
} |> Noizu.SimplePool.Database.MonitoringFramework.NodeTable.write!()
monitor_ref = Process.monitor(pid)
put_in(state, [Access.key(:extended), Access.key(:monitors), server], monitor_ref)
{:reply, effective, state}
end
#--------------------------------------
#
#--------------------------------------
def perform_hint_update(state, components, context, options) do
await_timeout = options[:wait] || 60_000
# call each service node to get current health checks.
# 1. Grab nodes
servers_raw = Amnesia.Fragment.async(fn ->
Noizu.SimplePool.Database.MonitoringFramework.NodeTable.where(1 == 1) |> Amnesia.Selection.values
end)
state = try do
update_effective(state, context, options)
rescue _e -> state
end
# 2. Grab Server Status
tasks = Enum.reduce(servers_raw, [], fn(x, acc) ->
if (x.identifier != node()) do
task = Task.async( fn ->
{x.identifier, server_health_check!(x.identifier, context, options)}
end)
acc ++ [task]
else
task = Task.async( fn -> {x.identifier, {:ack, state.environment_details.effective}} end)
acc ++ [task]
end
end)
servers = Enum.reduce(tasks, %{}, fn (task, acc) ->
case Task.await(task, await_timeout) do
{k, {:ack, v}} -> Map.put(acc, k, v)
_ -> acc
end
end)
# 3. Calculate Hints
valid_status = MapSet.new([:online, :degraded, :critical])
valid_status_weight = %{online: 1, degraded: 2, critical: 3}
update = case components do
%MapSet{} -> MapSet.to_list(components)
:all -> Enum.reduce(servers_raw, MapSet.new([]), fn(x, acc) ->
MapSet.union(acc, MapSet.new(Map.keys(x.entity.services)))
end) |> MapSet.to_list()
%Noizu.SimplePool.MonitoringFramework.Server.HealthCheck{} -> Map.keys(components.services)
end
Enum.reduce(update, true, fn(service, _acc) ->
candidates = Enum.reduce(servers, [], fn ({_server_id, server}, acc2) ->
s = server.services[service]
cond do
MapSet.member?(valid_status, server.status) && s && MapSet.member?(valid_status, s.status) ->
acc2 ++ [%{identifier: s.identifier, server_status: server.status, server_index: server.health_index, service_status: s.status, service_index: s.health_index, index: server.health_index + s.health_index}]
true ->
acc2
end
end)
candidates = Enum.sort(candidates, fn(a,b) ->
cond do
a.server_status == b.server_status ->
cond do
b.service_status == b.service_status -> a.index < b.index
true -> valid_status_weight[a.service_status] < valid_status_weight[b.service_status]
end
true -> valid_status_weight[a.server_status] < valid_status_weight[b.server_status]
end
end)
# 1. add a minimum of 1/3rd of the nodes and all good nodes.
candidate_pool_size = length(candidates)
min_bar = max(div(candidate_pool_size, 3), 1)
# first pass, grab best servers only.
hint = Enum.reduce(candidates, [], fn(x, acc3) ->
cond do
x.server_status == :online && x.service_status == :online -> acc3 ++ [x]
true -> acc3
end
end)
# second pass, include degraded
hint = Enum.reduce(candidates -- hint, hint, fn(x,acc3) ->
cond do
length(acc3) >= min_bar -> acc3
x.server_status == :online && Enum.member?([:online, :degraded], x.service_status)-> acc3 ++ [x]
true -> acc3
end
end)
# third pass, include critical
hint = Enum.reduce(candidates -- hint, hint, fn(x,acc3) ->
cond do
length(acc3) >= min_bar -> acc3
x.server_status == :online && Enum.member?([:online, :degraded, :critical], x.service_status)-> acc3 ++ [x]
true -> acc3
end
end)
# fourth pass, any online server
hint = Enum.reduce(candidates -- hint, hint, fn(x,acc3) ->
cond do
length(acc3) >= min_bar -> acc3
x.server_status == :online -> acc3 ++ [x]
true -> acc3
end
end)
# final pass, insure minbar
hint = Enum.reduce(candidates -- hint, hint, fn(x,acc3) ->
cond do
length(acc3) >= min_bar -> acc3
true -> acc3 ++ [x]
end
end)
hint_status = Enum.reduce(hint, {:online, :online}, fn(x, {wserver, wservice}) ->
cond do
valid_status_weight[wserver] < valid_status_weight[x.server_status] -> {x.server_status, x.service_status}
valid_status_weight[wserver] == valid_status_weight[x.server_status] && valid_status_weight[wservice] < valid_status_weight[x.service_status] -> {x.server_status, x.service_status}
true -> {wserver, wservice}
end
end)
hints = Enum.reduce(hint, %{}, fn(x,acc) -> Map.put(acc, x.identifier, x.index) end)
%Noizu.SimplePool.Database.MonitoringFramework.Service.HintTable{identifier: service, hint: hints, time_stamp: DateTime.utc_now, status: hint_status}
|> Noizu.SimplePool.Database.MonitoringFramework.Service.HintTable.write!()
:ok
end)
# 3. Update Service Hints.
{:noreply, state}
end
#--------------------------------------
#
#--------------------------------------
def handle_cast({:i, {:update_master_node, master_node, _options}, _context}, state) do
state = cond do
state == nil -> state
state.environment_details == nil -> state
state.environment_details.effective == nil -> state
true ->
put_in(state, [Access.key(:environment_details), Access.key(:effective), Access.key(:master_node)], master_node)
end
{:noreply, state}
end
#--------------------------------------
#
#--------------------------------------
  # Master cast: start every configured service supervisor on this node, run an
  # initial health check per service, mark node and environment online, persist
  # the node record, and refresh routing hints.
  def handle_cast({:m, {:start_services, options}, context}, state) do
    await_timeout = options[:wait] || 60_000
    # Start each service's supervisor and monitor it so a crash surfaces as :DOWN.
    monitors = Enum.reduce(state.environment_details.effective.services, %{}, fn({k,v}, acc) ->
      {:ok, sup_pid} = v.definition.supervisor.start_link(context, v.definition)
      m = Process.monitor(sup_pid)
      Map.put(acc, k, m)
    end)
    # Kick off one concurrent health check per freshly started service.
    tasks = Enum.reduce(state.environment_details.effective.services, [], fn({k,v}, acc) ->
      acc ++ [Task.async( fn ->
        h = v.definition.service.service_health_check!(options[:health_check_options] || %{}, context, options)
        {k,h} end)]
    end)
    state = Enum.reduce(tasks, state, fn(t, acc) ->
      {k, v} = Task.await(t, await_timeout)
      # NOTE(review): sleeps 1s after each awaited service — presumably a startup
      # settle delay, but it serializes and slows boot; confirm it is required.
      Process.sleep(1_000)
      case v do
        %Noizu.SimplePool.MonitoringFramework.Service.HealthCheck{} ->
          acc
          |> put_in([Access.key(:environment_details), Access.key(:effective), Access.key(:services), k], v)
        e ->
          # Anything other than a HealthCheck struct is treated as a startup failure.
          Logger.error("#{node()} - Service Startup Error #{inspect k} - #{inspect e}", Noizu.ElixirCore.CallingContext.metadata(context))
          acc
          |> put_in([Access.key(:environment_details), Access.key(:effective), Access.key(:services), k, Access.key(:status)], :error)
      end
    end)
    # Flag node + environment online and remember the supervisor monitors.
    state = state
    |> put_in([Access.key(:environment_details), Access.key(:effective), Access.key(:status)], :online)
    |> put_in([Access.key(:environment_details), Access.key(:status)], :online)
    |> put_in([Access.key(:environment_details), Access.key(:monitors)], monitors)
    %Noizu.SimplePool.Database.MonitoringFramework.NodeTable{
      identifier: state.environment_details.effective.identifier,
      status: state.environment_details.effective.status,
      directive: state.environment_details.effective.directive,
      health_index: state.environment_details.effective.health_index,
      entity: state.environment_details.effective
    } |> Noizu.SimplePool.Database.MonitoringFramework.NodeTable.write!()
    internal_update_hints(state.environment_details.effective, context, options)
    {:noreply, state}
  end
#--------------------------------------
#
#--------------------------------------
  # Debug/introspection call: return the full internal state (then hibernate).
  def handle_call({:i, {:fetch_internal_state, _options}, _context}, _from, state) do
    {:reply, state, state, :hibernate}
  end
#--------------------------------------
#
#--------------------------------------
  # Debug call: replace the internal state wholesale, replying with old and new.
  def handle_call({:i, {:set_internal_state, update, _options}, _context}, _from, state) do
    {:reply, %{old: state, new: update}, update, :hibernate}
  end
#--------------------------------------
#
#--------------------------------------
  # Master call: a remote node is joining; delegate to perform_join/6, which
  # replies with the joining node's effective health-check record.
  def handle_call({:i, {:join, server, initial, options}, context}, from, state) do
    perform_join(state, server, from, initial, context, options)
  end
#--------------------------------------
#
#--------------------------------------
  # Master call: register this node with the cluster. Resolves the master node
  # (cached/stored value first, else self-election when `initial` designates this
  # node). If we are master, resolve + persist our effective record locally;
  # otherwise join through the remote master and adopt its reply.
  # Replies {:ack, effective} or {:error, :master_node_required}.
  def handle_call({:m, {:register, initial, options}, context}, _from, state) do
    initial = initial || state.environment_details.initial
    master = cond do
      v = master_node(state) ->
        cond do
          v == :self -> update_master_node(node(), context, %{}) # This may be problematic.
          :else -> v
        end
      initial.master_node == :self || initial.master_node == node() -> update_master_node(node(), context, %{})
      :else -> nil
    end
    if master do
      if master == node() do
        # We are master: load the persisted record unless options[:update_node]
        # forces the freshly supplied initial record.
        effective = cond do
          options[:update_node] -> initial
          true ->
            case Noizu.SimplePool.Database.MonitoringFramework.NodeTable.read!(node()) do
              nil -> initial
              v = %Noizu.SimplePool.Database.MonitoringFramework.NodeTable{} -> v.entity
              _ -> initial
            end
        end
        effective = put_in(effective, [Access.key(:master_node)], master)
        %Noizu.SimplePool.Database.MonitoringFramework.NodeTable{
          identifier: effective.identifier,
          status: effective.status,
          directive: effective.directive,
          health_index: effective.health_index,
          entity: effective
        } |> Noizu.SimplePool.Database.MonitoringFramework.NodeTable.write!()
        state = state
        |> put_in([Access.key(:environment_details), Access.key(:effective)], effective)
        |> put_in([Access.key(:environment_details), Access.key(:default)], initial)
        |> put_in([Access.key(:environment_details), Access.key(:status)], :registered)
        {:reply, {:ack, state.environment_details.effective}, state}
      else
        # Remote master: join through it and adopt the effective record it returns.
        effective = remote_call(master, {:join, node(), initial, options}, context)
        state = state
        |> put_in([Access.key(:environment_details), Access.key(:effective)], effective)
        |> put_in([Access.key(:environment_details), Access.key(:default)], initial)
        |> put_in([Access.key(:environment_details), Access.key(:status)], :registered)
        {:reply, {:ack, state.environment_details.effective}, state}
      end
    else
      {:reply, {:error, :master_node_required}, state}
    end
  end
#--------------------------------------
#
#--------------------------------------
  # Internal call: refresh this node's service health checks and reply with the
  # updated effective record.
  def handle_call({:i, {:node_health_check!, options}, context}, _from, state) do
    state = update_effective(state, context, options)
    {:reply, {:ack, state.environment_details.effective}, state}
  end
#--------------------------------------
#
#--------------------------------------
def handle_call({:i, {:lock_server, components, options}, context}, _from, state) do
await_timeout = options[:wait] || 60_000
c = components == :all && Map.keys(state.environment_details.effective.services) || MapSet.to_list(components)
tasks = Enum.reduce(c, [], fn(service, acc) ->
if state.environment_details.effective.services[service] do
acc ++ [Task.async(fn -> {service, state.environment_details.effective.services[service].definition.service.lock!(context, options)} end)]
else
acc
end
end)
state = Enum.reduce(tasks, state, fn(task, acc) ->
case Task.await(task, await_timeout) do
{k, {:ack, s}} -> put_in(acc, [Access.key(:environment_details), Access.key(:effective), Access.key(:services), k], s)
_ -> acc
end
end)
%Noizu.SimplePool.Database.MonitoringFramework.NodeTable{
identifier: state.environment_details.effective.identifier,
status: state.environment_details.effective.status,
directive: state.environment_details.effective.directive,
health_index: state.environment_details.effective.health_index,
entity: state.environment_details.effective
} |> Noizu.SimplePool.Database.MonitoringFramework.NodeTable.write!()
if Map.get(options, :update_hints, true) do
internal_update_hints(state.environment_details.effective, context, options)
end
{:reply, :ack, state}
end
#--------------------------------------
#
#--------------------------------------
def handle_call({:i, {:release_server, components, options}, context}, _from, state) do
await_timeout = options[:wait] || 60_000
c = components == :all && Map.keys(state.environment_details.effective.services) || MapSet.to_list(components)
tasks = Enum.reduce(c, [], fn(service, acc) ->
if state.environment_details.effective.services[service] do
acc ++ [Task.async(fn -> {service, state.environment_details.effective.services[service].definition.service.release!(context, options)} end)]
else
acc
end
end)
state = Enum.reduce(tasks, state, fn(task, acc) ->
case Task.await(task, await_timeout) do
{k, {:ack, s}} -> put_in(acc, [Access.key(:environment_details), Access.key(:effective), Access.key(:services), k], s)
_ -> acc
end
end)
%Noizu.SimplePool.Database.MonitoringFramework.NodeTable{
identifier: state.environment_details.effective.identifier,
status: state.environment_details.effective.status,
directive: state.environment_details.effective.directive,
health_index: state.environment_details.effective.health_index,
entity: state.environment_details.effective
} |> Noizu.SimplePool.Database.MonitoringFramework.NodeTable.write!()
if Map.get(options, :update_hints, true) do
internal_update_hints(state.environment_details.effective, context, options)
end
{:reply, :ack, state}
end
#--------------------------------------
#
#--------------------------------------
  # Timer probe fired some time after a monitored server went down; intended to
  # check whether the node has recovered and rejoin/clean up accordingly.
  def handle_info({:i, {:server_recover_check, _server}, _context}, state) do
    # @TODO fully implement
    {:noreply, state}
  end
#--------------------------------------
#
#--------------------------------------
def handle_info({:DOWN, ref, :process, _process, _msg} = event, state) do
state = cond do
dropped_server = Enum.find(state.environment_details.monitors, fn({_k,v}) -> v == ref end) ->
{server, _ref} = dropped_server
Logger.error "LINK MONITOR: server down #{inspect server} - #{inspect event, pretty: true}"
# todo setup watchers[server] with a timer event that monitors for node recovery
#{:ok, t_ref} = :timer.send_after(@monitor_ms, self(), {:i, {:server_recover_check, server}, Noizu.ElixirCore.CallingContext.system()})
state
|> put_in([Access.key(:environment_details), Access.key(:monitors), server], nil)
#|> update_in([Access.key(:extended), :watchers], &(put_in(&1 || %{}, [server], t_ref)))
true ->
Logger.error "LINK MONITOR: #{inspect event, pretty: true}"
state
end
{:noreply, state}
end
#===========================================================================
# Helper Methods
#===========================================================================
#--------------------------------------
#
#--------------------------------------
def total_unallocated(unallocated) do
Enum.reduce(unallocated, 0, fn({_service, u}, acc) -> acc + u end)
end
#--------------------------------------
#
#--------------------------------------
  @doc """
  Distributes `unallocated` workers onto `output_server_list` up to the given
  `level` (:target | :soft_limit | :hard_limit | :overflow).

  First computes each server's remaining per-service bandwidth at that level,
  then hands out unallocated workers proportionally to each server's share of
  the service's total bandwidth. Returns
  `{remaining_unallocated, updated_service_allocation}`.
  """
  def fill_to({unallocated, service_allocation}, level, output_server_list, service_list, per_server_targets) do
    # Bandwidth = how many more workers each server may accept at this level.
    # :overflow ignores current allocation and re-opens the full hard limit.
    {total_bandwidth, bandwidth} = Enum.reduce(output_server_list, {%{}, %{}}, fn(server, {tb, b}) ->
      Enum.reduce(service_list, {tb, b}, fn(service, {tb2, b2}) ->
        cond do
          per_server_targets[server][service] ->
            psa = (service_allocation[server][service] || 0)
            pst = per_server_targets[server][service] || %{}
            bandwidth = case level do
              :target -> Map.get(pst, :target, 0) - psa
              :soft_limit -> Map.get(pst, :soft, 0) - psa
              :hard_limit -> Map.get(pst, :hard, 0) - psa
              :overflow -> Map.get(pst, :hard, 0)
            end |> max(0)
            m_b2 = b2
            |> update_in([server], &(&1 || %{}))
            |> put_in([server, service], bandwidth)
            m_tb2 = update_in(tb2, [service], &((&1 || 0) + bandwidth))
            {m_tb2, m_b2}
          true -> {tb2, b2}
        end
      end)
    end)
    # 3. Allocate out up to bandwidth to fill out to target levels
    # Each server receives round(bandwidth-share * p) workers, where p is capped
    # by the service's total bandwidth and its unallocated count.
    # NOTE(review): `p` deliberately reads the outer `unallocated`, not the
    # shrinking `u2`, so distribution stays proportional across servers; the
    # max(0, ...) below keeps the running count from going negative. Confirm
    # the resulting rounding overshoot is acceptable.
    Enum.reduce(bandwidth, {unallocated, service_allocation}, fn ({server, v}, {u, sa}) ->
      Enum.reduce(v, {u, sa}, fn({service, b}, {u2, sa2}) ->
        cond do
          b > 0 && u2[service] > 0 && total_bandwidth[service] > 0 ->
            p = min(total_bandwidth[service], unallocated[service])
            allocate = round((b/total_bandwidth[service]) * p)
            m_u2 = update_in(u2, [service], &(max(0, &1 - allocate)))
            m_sa2 = sa2 |> update_in([server], &(&1 || %{}))
            |> update_in([server, service], &((&1 || 0) + allocate))
            {m_u2, m_sa2}
          true -> {u2, sa2}
        end
      end)
    end)
  end
#--------------------------------------
#
#--------------------------------------
def profile_start(profiles \\ %{}, profile \\ :default) do
put_in(profiles, [profile], %{start: :os.system_time(:millisecond)})
end
#--------------------------------------
#
#--------------------------------------
def profile_end(profiles, profile \\ :default, options \\ %{info: 100, warn: 300, error: 700, log: true}) do
profiles = update_in(profiles, [profile], fn(p) -> put_in(p || %{}, [:end], :os.system_time(:millisecond)) end)
if options[:log] !== false do
cond do
profiles[profile][:start] == nil -> Logger.warn("#{__MODULE__} - profile_start not invoked for #{profile}")
options[:error] && (profiles[profile][:end] - profiles[profile][:start]) >= options[:error] ->
Logger.error("#{__MODULE__} #{profile} exceeded #{options[:error]} milliseconds @#{(profiles[profile][:end] - profiles[profile][:start])}")
options[:warn] && (profiles[profile][:end] - profiles[profile][:start]) >= options[:warn] ->
Logger.warn(fn -> "#{__MODULE__} #{profile} exceeded #{options[:warn]} milliseconds @#{(profiles[profile][:end] - profiles[profile][:start])}" end)
options[:info] && (profiles[profile][:end] - profiles[profile][:start]) >= options[:info] ->
Logger.info(fn -> "#{__MODULE__} #{profile} exceeded #{options[:info]} milliseconds @#{(profiles[profile][:end] - profiles[profile][:start])}" end)
true -> :ok
end
end
profiles
end
#--------------------------------------
#
#--------------------------------------
def master_node(%State{} = state) do
fg_get(@master_node_cache_key,
fn() ->
default = try do
cond do
state == nil -> {:fast_global, :no_cache, nil}
state.environment_details == nil -> {:fast_global, :no_cache, nil}
state.environment_details.effective == nil -> {:fast_global, :no_cache, nil}
v = state.environment_details.effective.master_node -> v
true -> {:fast_global, :no_cache, nil}
end
rescue _e -> {:fast_global, :no_cache, nil}
catch _e -> {:fast_global, :no_cache, nil}
end
if Noizu.SimplePool.Database.MonitoringFramework.SettingTable.wait(500) == :ok do
case Noizu.SimplePool.Database.MonitoringFramework.SettingTable.read!(:environment_master) do
[%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v}| _] -> v
%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v} -> v
nil -> default
_ -> default
end
else
default
end
end
)
end
def master_node(%{master_node: default} = _state) do
fg_get(@master_node_cache_key,
fn() ->
default = {:fast_global, :no_cache, default}
if Noizu.SimplePool.Database.MonitoringFramework.SettingTable.wait(500) == :ok do
case Noizu.SimplePool.Database.MonitoringFramework.SettingTable.read!(:environment_master) do
[%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v}| _] -> v
%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v} -> v
nil -> default
_ -> default
end
else
default
end
end
)
end
def master_node(_catch_all) do
fg_get(@master_node_cache_key,
fn() ->
default = {:fast_global, :no_cache, nil}
if Noizu.SimplePool.Database.MonitoringFramework.SettingTable.wait(500) == :ok do
case Noizu.SimplePool.Database.MonitoringFramework.SettingTable.read!(:environment_master) do
[%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v}| _] -> v
%Noizu.SimplePool.Database.MonitoringFramework.SettingTable{value: v} -> v
nil -> default
_ -> default
end
else
default
end
end
)
end
#--------------------
# todo move FastGlobal.Cluster into Noizu Core and remove duplicate code.
#--------------------
#-------------------
# fg_get
#-------------------
  # Fetch `identifier` from the FastGlobal cache, computing and caching it via
  # fg_sync_record/3 on a miss. `default` may be a 0-arity function (evaluated
  # lazily on a miss) or a plain value.
  def fg_get(identifier), do: fg_get(identifier, nil, %{})
  def fg_get(identifier, default), do: fg_get(identifier, default, %{})
  def fg_get(identifier, default, options) do
    case FastGlobal.get(identifier, :no_match) do
      :no_match -> fg_sync_record(identifier, default, options)
      v -> v
    end
  end
#-------------------
# sync_record
#-------------------
def fg_sync_record(identifier, default, _options) do
value = if (is_function(default, 0)), do: default.(), else: default
case value do
{:fast_global, :no_cache, v} -> v
value ->
if Semaphore.acquire({:fg_write_record, identifier}, 1) do
FastGlobal.put(identifier, value)
Semaphore.release({:fg_write_record, identifier})
end
value
end
end
end # end defmodule GoldenRatio.Components.Gateway.Server
end # end defmodule GoldenRatio.Components.Gateway
| 44.486545 | 226 | 0.51137 |
ff204a0c5a27b166049e51b82d13c697b9e089f8 | 1,612 | ex | Elixir | lib/forus_web.ex | finalclass/forus | f745d3ef63684b8fa61a904d4032b5ae26931943 | [
"MIT"
] | null | null | null | lib/forus_web.ex | finalclass/forus | f745d3ef63684b8fa61a904d4032b5ae26931943 | [
"MIT"
] | 1 | 2018-06-19T10:38:48.000Z | 2018-06-19T10:38:48.000Z | lib/forus_web.ex | finalclass/forus | f745d3ef63684b8fa61a904d4032b5ae26931943 | [
"MIT"
] | null | null | null | defmodule ForusWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ForusWeb, :controller
use ForusWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
  # Quoted block injected into every controller via `use ForusWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: ForusWeb
      import Plug.Conn
      import ForusWeb.Router.Helpers
      import ForusWeb.Gettext
    end
  end
  # Quoted block injected into every view via `use ForusWeb, :view`.
  def view do
    quote do
      use Phoenix.View, root: "lib/forus_web/templates",
                        namespace: ForusWeb
      # Import convenience functions from controllers
      import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML
      import ForusWeb.Router.Helpers
      import ForusWeb.ErrorHelpers
      import ForusWeb.FormsHelpers
      import ForusWeb.Gettext
    end
  end
  # Quoted block injected into the router via `use ForusWeb, :router`.
  def router do
    quote do
      use Phoenix.Router
      import Plug.Conn
      import Phoenix.Controller
    end
  end
  # Quoted block injected into every channel via `use ForusWeb, :channel`.
  def channel do
    quote do
      use Phoenix.Channel
      import ForusWeb.Gettext
    end
  end
  @doc """
  When used, dispatch to the appropriate controller/view/etc.

  `use ForusWeb, :controller` expands to the quoted block returned by
  `controller/0` (and likewise for `:view`, `:router`, `:channel`).
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 23.362319 | 69 | 0.683002 |
ff207264b994e666a306b84e601fecf2179ccb82 | 2,111 | exs | Elixir | test/sanbase/auth/apikey/apikey_test.exs | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | test/sanbase/auth/apikey/apikey_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | test/sanbase/auth/apikey/apikey_test.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Accounts.ApiKeyTest do
use Sanbase.DataCase, async: false
alias Sanbase.Accounts.{
User,
Apikey
}
# Runs before every test: finds or inserts (by email) the user that owns
# the apikeys under test, and hands it to each test via the context map.
setup do
  {:ok, user} = User.find_or_insert_by(:email, "as819asdnmaso1011@santiment.net")

  %{
    user: user
  }
end
test "get user by apikey", %{user: user} do
{:ok, apikey} = Apikey.generate_apikey(user)
{:ok, retrieved_user} = Apikey.apikey_to_user(apikey)
assert user.id == retrieved_user.id
end
# The token part is found in the DB, but validation of the HMAC part fails.
test "fail when apikey second part is invalid", %{user: user} do
  {:ok, apikey} = Apikey.generate_apikey(user)
  tampered = apikey <> "s"

  {:error, error} = Apikey.apikey_to_user(tampered)
  assert error =~ "Apikey '#{tampered}' is not valid"
end
# The token part is not present in the DB at all, so the lookup fails.
test "fail when the apikey first part is invalid", %{user: user} do
  {:ok, apikey} = Apikey.generate_apikey(user)
  tampered = "s" <> apikey

  {:error, error} = Apikey.apikey_to_user(tampered)
  assert error =~ "Apikey '#{tampered}' is not valid"
end
# An apikey that cannot be split into its token and HMAC parts is rejected
# as malformed.
test "fail when the apikey cannot be split properly" do
  apikey = "notproperlyformatedapikey"

  # Pass the bound variable instead of repeating the literal (the original
  # duplicated the string, letting the call and assertion drift apart).
  {:error, error} = Apikey.apikey_to_user(apikey)
  assert error =~ "Apikey '#{apikey}' is malformed"
end
test "revoke apikey", %{user: user} do
# Create and test the apikey
{:ok, apikey} = Apikey.generate_apikey(user)
{:ok, retrieved_user} = Apikey.apikey_to_user(apikey)
assert user.id == retrieved_user.id
# Revoke the apikey and expect it to be non valid
:ok = Apikey.revoke_apikey(user, apikey)
assert {:error, "Apikey '#{apikey}' is not valid"} == Apikey.apikey_to_user(apikey)
end
test "get list of apikeys", %{user: user} do
{:ok, apikey1} = Apikey.generate_apikey(user)
{:ok, apikey2} = Apikey.generate_apikey(user)
assert {:ok, [apikey1, apikey2]} == Apikey.apikeys_list(user)
end
test "get list of apikeys when there are none", %{user: user} do
assert {:ok, []} == Apikey.apikeys_list(user)
end
end
| 31.044118 | 87 | 0.668404 |
ff207f2aaa4e103fa30d5b67f602cf2859a13b55 | 2,970 | ex | Elixir | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/audit_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/audit_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/bigtable_admin/lib/google_api/bigtable_admin/v2/model/audit_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigtableAdmin.V2.Model.AuditConfig do
  @moduledoc """
  Specifies the audit configuration for a service. The configuration determines
  which permission types are logged, and what identities, if any, are exempted
  from logging. An AuditConfig must have one or more AuditLogConfigs.

  If there are AuditConfigs for both `allServices` and a specific service, the
  union of the two AuditConfigs is used for that service: the log_types
  specified in each AuditConfig are enabled, and the exempted_members in each
  AuditLogConfig are exempted.

  Example Policy with multiple AuditConfigs: { "audit_configs": [ { "service": "allServices", "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" }, { "log_type": "ADMIN_READ" } ] }, { "service": "sampleservice.googleapis.com", "audit_log_configs": [ { "log_type": "DATA_READ" }, { "log_type": "DATA_WRITE", "exempted_members": [ "user:aliya@example.com" ] } ] } ] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ logging. It also exempts jose@example.com from DATA_READ logging, and aliya@example.com from DATA_WRITE logging.

  ## Attributes

  *   `auditLogConfigs` (*type:* `list(GoogleApi.BigtableAdmin.V2.Model.AuditLogConfig.t)`, *default:* `nil`) - The configuration for logging of each type of permission.
  *   `service` (*type:* `String.t`, *default:* `nil`) - Specifies a service that will be enabled for audit logging. For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. `allServices` is a special value that covers all services.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :auditLogConfigs => list(GoogleApi.BigtableAdmin.V2.Model.AuditLogConfig.t()),
          :service => String.t()
        }

  # `field/2,3` comes from GoogleApi.Gax.ModelBase and registers the key for
  # JSON (de)serialization; `type: :list` decodes a JSON array of
  # AuditLogConfig objects into a list of structs.
  field(:auditLogConfigs, as: GoogleApi.BigtableAdmin.V2.Model.AuditLogConfig, type: :list)
  field(:service)
end
# Poison decoding delegates to the `decode/2` generated on the model by
# GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.BigtableAdmin.V2.Model.AuditConfig do
  def decode(value, options),
    do: GoogleApi.BigtableAdmin.V2.Model.AuditConfig.decode(value, options)
end
# Poison encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.BigtableAdmin.V2.Model.AuditConfig do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 59.4 | 1,106 | 0.748485 |
ff209b4685a9ef36476498fc0af1003666789447 | 1,627 | ex | Elixir | lib/cloudinary/transformation/effect/preview.ex | h-ikeda/cloudinary-elixir | 5e70aedb6d1e51839f1e21c49b40293036b99efd | [
"MIT"
] | 1 | 2021-05-23T09:17:44.000Z | 2021-05-23T09:17:44.000Z | lib/cloudinary/transformation/effect/preview.ex | h-ikeda/cloudinary-elixir | 5e70aedb6d1e51839f1e21c49b40293036b99efd | [
"MIT"
] | 44 | 2020-05-15T03:36:36.000Z | 2022-03-23T21:39:11.000Z | lib/cloudinary/transformation/effect/preview.ex | h-ikeda/cloudinary-elixir | 5e70aedb6d1e51839f1e21c49b40293036b99efd | [
"MIT"
] | null | null | null | defmodule Cloudinary.Transformation.Effect.Preview do
@moduledoc false

# Guard helpers: durations must be positive numbers, segment counts must be
# positive integers.
defguardp is_duration(value) when is_number(value) and value > 0
defguardp is_max_seg(value) when is_integer(value) and value > 0

# Builds the `preview` effect string used in Cloudinary transformation URLs.
# Each clause covers one combination of the supported options; any other
# shape (or a non-positive value) falls through and raises
# FunctionClauseError, exactly as the guards dictate.
@spec to_url_string(%{
        optional(:duration) => number,
        optional(:max_segments) => pos_integer,
        optional(:min_segment_duration) => number
      }) :: String.t()
def to_url_string(%{duration: dur, max_segments: segs, min_segment_duration: min})
    when is_duration(dur) and is_max_seg(segs) and is_duration(min),
    do: "preview:duration_#{dur}:max_seg_#{segs}:min_seg_dur_#{min}"

def to_url_string(%{duration: dur, max_segments: segs})
    when is_duration(dur) and is_max_seg(segs),
    do: "preview:duration_#{dur}:max_seg_#{segs}"

def to_url_string(%{duration: dur, min_segment_duration: min})
    when is_duration(dur) and is_duration(min),
    do: "preview:duration_#{dur}:min_seg_dur_#{min}"

def to_url_string(%{max_segments: segs, min_segment_duration: min})
    when is_max_seg(segs) and is_duration(min),
    do: "preview:max_seg_#{segs}:min_seg_dur_#{min}"

def to_url_string(%{duration: dur}) when is_duration(dur),
  do: "preview:duration_#{dur}"

def to_url_string(%{max_segments: segs}) when is_max_seg(segs),
  do: "preview:max_seg_#{segs}"

def to_url_string(%{min_segment_duration: min}) when is_duration(min),
  do: "preview:min_seg_dur_#{min}"
end
| 37.837209 | 96 | 0.738783 |
ff20a81db47409fb9d1cd6aecb5b9f811be4b2b1 | 24,852 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/reports.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/reports.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/reports.ex | kaaboaye/elixir-google-api | 1896784c4342151fd25becd089a5beb323eff567 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Api.Reports do
  @moduledoc """
  API calls for all endpoints tagged `Reports`.

  This module is produced by the elixir code generator (see the file header)
  and must not be edited by hand. Every function follows the same shape:
  build a `GoogleApi.Gax.Request` (method, templated URL, optional params),
  execute it over the given connection, and decode the response into the
  documented model struct.
  """

  alias GoogleApi.DFAReporting.V34.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version read from mix.exs at compile time; attached to
  # every outgoing request via `Request.library_version/2`.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Deletes a report by its ID.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the report.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:error, Tesla.Env.t()}
  def dfareporting_reports_delete(
        connection,
        profile_id,
        report_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # Maps each supported optional parameter to where it is placed in the
    # request (here: all are query-string parameters).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "reportId" => URI.encode(report_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    # DELETE has no response body to decode into a struct.
    |> Response.decode(opts ++ [decode: false])
  end

  @doc """
  Retrieves a report by its ID.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the report.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.Report{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_get(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.Report.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_get(
        connection,
        profile_id,
        report_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "reportId" => URI.encode(report_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Report{}])
  end

  @doc """
  Creates a report.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.Report.t`) - The report definition to create.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.Report{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.Report.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_insert(connection, profile_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      # The report definition travels in the request body, not the query.
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Report{}])
  end

  @doc """
  Retrieves list of reports.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
      *   `:pageToken` (*type:* `String.t`) - The value of the nextToken from the previous result page.
      *   `:scope` (*type:* `String.t`) - The scope that defines which results are returned.
      *   `:sortField` (*type:* `String.t`) - The field by which to sort the list.
      *   `:sortOrder` (*type:* `String.t`) - Order of sorted results.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.ReportList{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.ReportList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_list(connection, profile_id, optional_params \\ [], opts \\ []) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      # Pagination and ordering controls.
      :maxResults => :query,
      :pageToken => :query,
      :scope => :query,
      :sortField => :query,
      :sortOrder => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.ReportList{}])
  end

  @doc """
  Runs a report.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the report.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:synchronous` (*type:* `boolean()`) - If set and true, tries to run the report synchronously.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.File{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_run(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.File.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_run(
        connection,
        profile_id,
        report_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :synchronous => :query
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}/run", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "reportId" => URI.encode(report_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.File{}])
  end

  @doc """
  Updates a report.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the report.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.Report.t`) - The full replacement report definition.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.Report{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_update(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.Report.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_update(
        connection,
        profile_id,
        report_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "reportId" => URI.encode(report_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.Report{}])
  end

  @doc """
  Returns the fields that are compatible to be selected in the respective sections of a report criteria, given the fields already selected in the input report and user permissions.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA user profile ID.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.Report.t`) - The report whose already-selected fields constrain the answer.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.CompatibleFields{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_compatible_fields_query(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.CompatibleFields.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_compatible_fields_query(
        connection,
        profile_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url(
        "/dfareporting/v3.4/userprofiles/{profileId}/reports/compatiblefields/query",
        %{
          "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.CompatibleFields{}])
  end

  @doc """
  Retrieves a report file. This method supports media download.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the report.
  *   `file_id` (*type:* `String.t`) - The ID of the report file.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.File{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_files_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.File.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_files_get(
        connection,
        profile_id,
        report_id,
        file_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url(
        "/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}/files/{fileId}",
        %{
          "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
          "reportId" => URI.encode(report_id, &URI.char_unreserved?/1),
          "fileId" => URI.encode(file_id, &URI.char_unreserved?/1)
        }
      )
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.File{}])
  end

  @doc """
  Lists files for a report.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
  *   `profile_id` (*type:* `String.t`) - The DFA profile ID.
  *   `report_id` (*type:* `String.t`) - The ID of the parent report.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
      *   `:pageToken` (*type:* `String.t`) - The value of the nextToken from the previous result page.
      *   `:sortField` (*type:* `String.t`) - The field by which to sort the list.
      *   `:sortOrder` (*type:* `String.t`) - Order of sorted results.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DFAReporting.V34.Model.FileList{}}` on success
  *   `{:error, info}` on failure
  """
  @spec dfareporting_reports_files_list(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.DFAReporting.V34.Model.FileList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, Tesla.Env.t()}
  def dfareporting_reports_files_list(
        connection,
        profile_id,
        report_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query,
      :sortField => :query,
      :sortOrder => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/reports/{reportId}/files", %{
        "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
        "reportId" => URI.encode(report_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.FileList{}])
  end
end
| 41.282392 | 187 | 0.612868 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.