hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff0a82942c051445e70e01679cea84882ea72f50 | 1,235 | ex | Elixir | apps/backoffice/web/views/error_helpers.ex | devnacho/acme_bank | cee19a490d2b3c04465273b7a6212d7e6a81f736 | [
"MIT"
] | 776 | 2016-07-16T14:24:37.000Z | 2022-03-07T17:05:11.000Z | apps/backoffice/web/views/error_helpers.ex | fercho1224/devops | 81a352d857e190963d9f045d73c91f3c36a12064 | [
"MIT"
] | 19 | 2016-09-03T15:06:57.000Z | 2021-12-10T10:04:05.000Z | apps/backoffice/web/views/error_helpers.ex | fercho1224/devops | 81a352d857e190963d9f045d73c91f3c36a12064 | [
"MIT"
] | 130 | 2016-09-03T19:44:27.000Z | 2022-01-16T12:27:54.000Z | defmodule Backoffice.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(Backoffice.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(Backoffice.Gettext, "errors", msg, opts)
end
end
end
| 30.121951 | 76 | 0.669636 |
ff0a9264f7c4a811cb718db778e25125ae8e745e | 1,255 | ex | Elixir | test/support/conn_case.ex | Gustavo-Cauzzi/Cauzzipay | 912b3fa13dbb920a5af242134c77d44e63c39c6f | [
"MIT"
] | 1 | 2021-05-19T01:25:54.000Z | 2021-05-19T01:25:54.000Z | test/support/conn_case.ex | Gustavo-Cauzzi/Cauzzipay | 912b3fa13dbb920a5af242134c77d44e63c39c6f | [
"MIT"
] | null | null | null | test/support/conn_case.ex | Gustavo-Cauzzi/Cauzzipay | 912b3fa13dbb920a5af242134c77d44e63c39c6f | [
"MIT"
] | null | null | null | defmodule CauzzipayWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use CauzzipayWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import CauzzipayWeb.ConnCase
alias CauzzipayWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint CauzzipayWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Cauzzipay.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Cauzzipay.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 28.522727 | 71 | 0.72749 |
ff0aa143efa010a22c77b2ad153334978204ffe2 | 529 | ex | Elixir | lib/mail_slurp_api/model/template_variable.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/template_variable.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/template_variable.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule MailSlurpAPI.Model.TemplateVariable do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"name",
:"variableType"
]
@type t :: %__MODULE__{
:"name" => String.t,
:"variableType" => String.t
}
end
defimpl Poison.Decoder, for: MailSlurpAPI.Model.TemplateVariable do
def decode(value, _options) do
value
end
end
| 18.892857 | 91 | 0.678639 |
ff0add3fb5c45c7bf5db9bdf2d97ea15695c4242 | 4,207 | ex | Elixir | lib/telegraph/api.ex | etroynov/telegraph | e355362d2edbb1aa47669de6525c621be6d03e58 | [
"MIT"
] | 1 | 2022-03-14T18:00:31.000Z | 2022-03-14T18:00:31.000Z | lib/telegraph/api.ex | etroynov/telegraph | e355362d2edbb1aa47669de6525c621be6d03e58 | [
"MIT"
] | 2 | 2022-03-16T20:49:13.000Z | 2022-03-23T22:08:48.000Z | lib/telegraph/api.ex | etroynov/telegraph | e355362d2edbb1aa47669de6525c621be6d03e58 | [
"MIT"
] | null | null | null | defmodule Telegraph.API do
@moduledoc """
Provides basic functionalities for Telegram Bot API.
"""
alias Telegraph.Model.Error
alias Telegraph.Config
defp build_url(method), do: Config.base_url() <> Config.token() <> "/" <> method
defp process_response(response, method) do
case decode_response(response) do
{:ok, true} -> :ok
{:ok, %{ok: false, description: description}} -> {:error, %Error{reason: description}}
{:ok, result} -> {:ok, Telegraph.Parser.parse_result(result, method)}
{:error, %HTTPoison.Error{reason: reason}} -> {:error, %Error{reason: reason}}
{:error, error} -> {:error, %Error{reason: error}}
end
end
defp decode_response(response) do
with {:ok, %HTTPoison.Response{body: body}} <- response,
{:ok, %{result: result}} <- Jason.decode(body, keys: :atoms),
do: {:ok, result}
end
defp build_multipart_request(params, file_field) do
{file_path, params} = Keyword.pop(params, file_field)
params = for {k, v} <- params, do: {to_string(k), v}
{:multipart,
params ++
[
{:file, file_path,
{"form-data", [{"name", to_string(file_field)}, {"filename", file_path}]}, []}
]}
end
defp calculate_timeout(options) when is_list(options) do
(Keyword.get(options, :timeout, 0) + Config.recv_timeout()) * 1000
end
defp calculate_timeout(options) when is_map(options) do
(Map.get(options, :timeout, 0) + Config.recv_timeout()) * 1000
end
defp build_request(params, file_field) when is_list(params) do
params
|> Keyword.update(:reply_markup, nil, &Jason.encode!(&1))
|> Keyword.update(:prices, nil, &Jason.encode!(&1))
|> map_params(file_field)
end
defp build_request(params, file_field) when is_map(params) do
params
|> Map.update(:reply_markup, nil, &Jason.encode!(&1))
|> Map.update(:prices, nil, &Jason.encode!(&1))
|> map_params(file_field)
end
defp map_params(params, file_field) do
params =
params
|> Enum.filter(fn {_, v} -> v end)
|> Enum.map(fn {k, v} -> {k, to_string(v)} end)
if !is_nil(file_field) and File.exists?(params[file_field]) do
build_multipart_request(params, file_field)
else
{:form, params}
end
end
defp build_options(options) do
timeout = calculate_timeout(options)
opts = [recv_timeout: timeout]
opts =
case Config.proxy() do
proxy when byte_size(proxy) > 0 -> Keyword.put(opts, :proxy, proxy)
proxy when is_tuple(proxy) and tuple_size(proxy) == 3 -> Keyword.put(opts, :proxy, proxy)
_ -> opts
end
opts =
case Config.proxy_auth() do
proxy_auth when is_tuple(proxy_auth) and tuple_size(proxy_auth) == 2 ->
Keyword.put(opts, :proxy_auth, proxy_auth)
_ ->
opts
end
opts =
case Config.socks5_user() do
socks5_user when byte_size(socks5_user) > 0 ->
Keyword.put(opts, :socks5_user, socks5_user)
_ ->
opts
end
case Config.socks5_pass() do
socks5_pass when byte_size(socks5_pass) > 0 -> Keyword.put(opts, :socks5_pass, socks5_pass)
_ -> opts
end
end
@doc """
Generic method to call Telegram Bot API.
Args:
* `method` - name of API method
* `options` - orddict of options
* `file_field` - specify the key of file_field in `options` when sending files
"""
@spec request(binary, [{atom, any}], atom) :: :ok | {:error, Error.t()} | {:ok, any}
def request(method, options \\ [], file_field \\ nil) do
method
|> build_url
|> HTTPoison.post(build_request(options, file_field), [], build_options(options))
|> process_response(method)
end
def request?(method, options \\ [], file_field \\ nil) do
{_, response} = request(method, options, file_field)
response
end
@doc ~S"""
Use this function to build file url.
iex> Telegraph.API.build_file_url("document/file_10")
"https://api.telegram.org/file/bot#{Telegraph.Config.token()}/document/file_10"
"""
@spec build_file_url(binary) :: binary
def build_file_url(file_path) do
Config.file_base_url() <> Config.token() <> "/" <> file_path
end
end
| 29.836879 | 97 | 0.632993 |
ff0ae684071b772bf5c324db350fc2f8b604d45e | 187 | ex | Elixir | apps/tai/lib/tai/events/venue_start.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/events/venue_start.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/events/venue_start.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.Events.VenueStart do
alias __MODULE__
@type venue_id :: Tai.Venue.id()
@type t :: %VenueStart{venue: venue_id}
@enforce_keys ~w(venue)a
defstruct ~w(venue)a
end
| 18.7 | 41 | 0.71123 |
ff0af5ab541616f8ae21a34baa799e0cb6153abd | 1,786 | ex | Elixir | lib/ex_platform_web/live/live_helpers.ex | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 129 | 2021-05-30T10:10:59.000Z | 2022-03-27T14:42:48.000Z | lib/ex_platform_web/live/live_helpers.ex | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 48 | 2021-05-30T21:34:03.000Z | 2022-02-21T14:20:04.000Z | lib/ex_platform_web/live/live_helpers.ex | alex-min/ex_platform | 73dea336ee939d8155b4b5953146351af30edd66 | [
"MIT"
] | 12 | 2021-05-30T22:03:49.000Z | 2022-02-08T19:59:16.000Z | defmodule ExPlatformWeb.LiveHelpers do
@moduledoc """
A collection of the live helpers used in the live templates.
"""
import Phoenix.LiveView.Helpers
@doc """
Renders a component inside the `ExPlatformWeb.ModalComponent` component.
The rendered modal receives a `:return_to` option to properly update
the URL when the modal is closed.
## Examples
<%= live_modal @socket, ExPlatformWeb.UserLive.FormComponent,
id: @user.id || :new,
action: @live_action,
user: @user,
return_to: Routes.user_index_path(@socket, :index) %>
"""
def live_modal(component, opts) do
path = Keyword.fetch!(opts, :return_to)
modal_opts = [id: :modal, return_to: path, component: component, opts: opts]
live_component(ExPlatformWeb.ModalComponent, modal_opts)
end
def get_timezone(socket, session) do
if Phoenix.LiveView.connected?(socket) do
case Phoenix.LiveView.get_connect_params(socket) do
%{"timezone" => timezone} -> timezone
_ -> session["timezone"] || 0
end
else
session["timezone"] || 0
end
end
def to_datestring(date, _locale, _timezone) when date == nil or date == "" do
""
end
def to_datestring(date, locale, timezone) when is_binary(date) do
{:ok, parsed_date, _} = DateTime.from_iso8601(date)
{:ok, str} =
ExPlatform.Cldr.DateTime.to_string(parsed_date |> DateTime.add(60 * 60 * timezone, :second),
locale: locale
)
str
end
@spec to_datestring(DateTime.t() | String.t(), String.t(), integer()) :: String.t()
def to_datestring(date, locale, timezone) do
{:ok, str} =
ExPlatform.Cldr.DateTime.to_string(date |> DateTime.add(60 * 60 * timezone, :second),
locale: locale
)
str
end
end
| 27.90625 | 98 | 0.656215 |
ff0af601896e1b02d6616f0b092328fb24be3261 | 1,185 | ex | Elixir | lib/instant_poll_web/channels/user_socket.ex | workgena/instant_poll | 94be29da99cfcb54576ae0ce34d395fff7b8ca39 | [
"MIT"
] | 1 | 2018-11-30T09:08:09.000Z | 2018-11-30T09:08:09.000Z | lib/instant_poll_web/channels/user_socket.ex | workgena/instant_poll | 94be29da99cfcb54576ae0ce34d395fff7b8ca39 | [
"MIT"
] | 5 | 2021-01-28T19:08:56.000Z | 2021-05-07T22:43:54.000Z | lib/instant_poll_web/channels/user_socket.ex | workgena/instant_poll | 94be29da99cfcb54576ae0ce34d395fff7b8ca39 | [
"MIT"
] | null | null | null | defmodule InstantPollWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", InstantPollWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# InstantPollWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.184211 | 86 | 0.706329 |
ff0b30714307f456638e85965b4fa3c2d8cc7c42 | 755 | ex | Elixir | lib/honeydew/dispatcher/lru.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 717 | 2015-06-15T19:30:54.000Z | 2022-03-22T06:10:09.000Z | lib/honeydew/dispatcher/lru.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 106 | 2015-06-25T05:38:05.000Z | 2021-12-08T23:17:19.000Z | lib/honeydew/dispatcher/lru.ex | kianmeng/honeydew | 7c0e825c70ef4b72c82d02ca95491e7365d6b2e8 | [
"MIT"
] | 60 | 2015-06-07T00:48:37.000Z | 2022-03-06T08:20:23.000Z | defmodule Honeydew.Dispatcher.LRU do
@moduledoc false
# TODO: docs
def init do
{:ok, {:queue.new, MapSet.new}}
end
def available?({free, _busy}) do
!:queue.is_empty(free)
end
def check_in(worker, {free, busy}) do
{:queue.in(worker, free), MapSet.delete(busy, worker)}
end
def check_out(_job, {free, busy}) do
case :queue.out(free) do
{{:value, worker}, free} ->
{worker, {free, MapSet.put(busy, worker)}}
{:empty, _free} ->
{nil, {free, busy}}
end
end
def remove(worker, {free, busy}) do
{:queue.filter(&(&1 != worker), free), MapSet.delete(busy, worker)}
end
def known?(worker, {free, busy}) do
:queue.member(worker, free) || MapSet.member?(busy, worker)
end
end
| 21.571429 | 71 | 0.605298 |
ff0b3563e661aac2dabf8e89479c05d356375f6b | 2,399 | ex | Elixir | clients/managed_identities/lib/google_api/managed_identities/v1/model/date.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/managed_identities/lib/google_api/managed_identities/v1/model/date.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/managed_identities/lib/google_api/managed_identities/v1/model/date.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ManagedIdentities.V1.Model.Date do
@moduledoc """
Represents a whole or partial calendar date, such as a birthday. The time of day and time zone are either specified elsewhere or are insignificant. The date is relative to the Gregorian Calendar. This can represent one of the following: * A full date, with non-zero year, month, and day values * A month and day value, with a zero year, such as an anniversary * A year on its own, with zero month and day values * A year and month value, with a zero day, such as a credit card expiration date Related types are google.type.TimeOfDay and `google.protobuf.Timestamp`.
## Attributes
* `day` (*type:* `integer()`, *default:* `nil`) - Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to specify a year by itself or a year and month where the day isn't significant.
* `month` (*type:* `integer()`, *default:* `nil`) - Month of a year. Must be from 1 to 12, or 0 to specify a year without a month and day.
* `year` (*type:* `integer()`, *default:* `nil`) - Year of the date. Must be from 1 to 9999, or 0 to specify a date without a year.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:day => integer(),
:month => integer(),
:year => integer()
}
field(:day)
field(:month)
field(:year)
end
defimpl Poison.Decoder, for: GoogleApi.ManagedIdentities.V1.Model.Date do
def decode(value, options) do
GoogleApi.ManagedIdentities.V1.Model.Date.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ManagedIdentities.V1.Model.Date do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 45.264151 | 567 | 0.717799 |
ff0b676631521cd90d0fa6e0ba9b1c76924dbf3d | 2,324 | ex | Elixir | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/user_invitation.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/user_invitation.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/user_invitation.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudIdentity.V1.Model.UserInvitation do
@moduledoc """
The `UserInvitation` resource represents an email that can be sent to an unmanaged user account inviting them to join the customer’s Google Workspace or Cloud Identity account. An unmanaged account shares an email address domain with the Google Workspace or Cloud Identity account but is not managed by it yet. If the user accepts the `UserInvitation`, the user account will become managed.
## Attributes
* `mailsSentCount` (*type:* `String.t`, *default:* `nil`) - Number of invitation emails sent to the user.
* `name` (*type:* `String.t`, *default:* `nil`) - Shall be of the form `customers/{customer}/userinvitations/{user_email_address}`.
* `state` (*type:* `String.t`, *default:* `nil`) - State of the `UserInvitation`.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Time when the `UserInvitation` was last updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:mailsSentCount => String.t() | nil,
:name => String.t() | nil,
:state => String.t() | nil,
:updateTime => DateTime.t() | nil
}
field(:mailsSentCount)
field(:name)
field(:state)
field(:updateTime, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.CloudIdentity.V1.Model.UserInvitation do
def decode(value, options) do
GoogleApi.CloudIdentity.V1.Model.UserInvitation.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudIdentity.V1.Model.UserInvitation do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.5 | 392 | 0.71988 |
ff0b758fd373ce0150fcc826382ff0aa98906b58 | 6,067 | ex | Elixir | lib/vintage_net_wizard/web/router.ex | magaloop/vintage_net_wizard | 43edd17fd82cde16846a3ca2870bfae57f5577ad | [
"Apache-2.0"
] | null | null | null | lib/vintage_net_wizard/web/router.ex | magaloop/vintage_net_wizard | 43edd17fd82cde16846a3ca2870bfae57f5577ad | [
"Apache-2.0"
] | 1 | 2020-03-05T12:21:10.000Z | 2020-03-05T12:21:10.000Z | lib/vintage_net_wizard/web/router.ex | magaloop/vintage_net_wizard | 43edd17fd82cde16846a3ca2870bfae57f5577ad | [
"Apache-2.0"
] | null | null | null | defmodule VintageNetWizard.Web.Router do
@moduledoc false
use Plug.Router
use Plug.Debugger, otp_app: :vintage_net_wizard
alias VintageNetWizard.{
Backend,
Web.Endpoint,
WiFiConfiguration
}
plug(Plug.Logger, log: :debug)
plug(Plug.Static, from: {:vintage_net_wizard, "priv/static"}, at: "/")
plug(Plug.Parsers, parsers: [Plug.Parsers.URLENCODED, :json], json_decoder: Jason)
plug(:match)
plug(:dispatch)
get "/" do
case Backend.configurations() do
[] ->
redirect(conn, "/networks")
configs ->
render_page(conn, "index.html",
configs: configs,
configuration_status: configuration_status_details(),
format_security: &WiFiConfiguration.security_name/1,
get_key_mgmt: &WiFiConfiguration.get_key_mgmt/1
)
end
end
post "/ssid/:ssid" do
password = conn.body_params["password"]
params = Map.put(conn.body_params, "ssid", ssid)
case WiFiConfiguration.from_params(params) do
{:ok, wifi_config} ->
:ok = Backend.save(wifi_config)
redirect(conn, "/")
error ->
{:ok, key_mgmt} = WiFiConfiguration.key_mgmt_from_string(conn.body_params["key_mgmt"])
error_message = password_error_message(error)
render_password_page(conn, key_mgmt,
ssid: ssid,
error: error_message,
password: password,
user: conn.body_params["user"]
)
end
end
get "/ssid/:ssid" do
key_mgmt =
Backend.access_points()
|> Enum.find(&(&1.ssid == ssid))
|> get_key_mgmt_from_ap()
render_password_page(conn, key_mgmt, ssid: ssid, password: "", error: "", user: "")
end
get "/redirect" do
redirect_with_dnsname(conn)
end
get "/ncsi.txt" do
redirect_with_dnsname(conn)
end
get "/connecttest.txt" do
redirect_with_dnsname(conn)
end
get "/generate_204" do
redirect_with_dnsname(conn)
end
get "/hotspot-detect.html" do
render_page(conn, "apple_captive_portal.html", dns_name: get_redirect_dnsname(conn))
end
get "/library/test/success.html" do
render_page(conn, "apple_captive_portal.html", dns_name: get_redirect_dnsname(conn))
end
get "/networks" do
render_page(conn, "networks.html", configuration_status: configuration_status_details())
end
get "/networks/new" do
render_page(conn, "network_new.html")
end
post "/networks/new" do
ssid = Map.get(conn.body_params, "ssid")
case Map.get(conn.body_params, "key_mgmt") do
"none" ->
{:ok, config} = WiFiConfiguration.from_params(conn.body_params)
:ok = Backend.save(config)
redirect(conn, "/")
key_mgmt ->
key_mgmt = String.to_existing_atom(key_mgmt)
render_password_page(conn, key_mgmt, ssid: ssid, password: "", error: "", user: "")
end
end
get "/apply" do
render_page(conn, "apply.html", ssid: VintageNetWizard.APMode.ssid())
end
get "/complete" do
:ok = Backend.complete()
_ =
Task.Supervisor.start_child(VintageNetWizard.TaskSupervisor, fn ->
# We don't want to stop the server before we
# send the response back.
:timer.sleep(3000)
Endpoint.stop_server()
end)
render_page(conn, "complete.html")
end
forward("/api/v1", to: VintageNetWizard.Web.Api)
match _ do
send_resp(conn, 404, "oops")
end
defp redirect_with_dnsname(conn) do
conn
|> put_resp_header("location", get_redirect_dnsname(conn))
|> send_resp(302, "")
end
defp get_redirect_dnsname(conn, to \\ nil) do
dns_name = Application.get_env(:vintage_net_wizard, :dns_name, "wifi.config")
port = if conn.port != 80 and conn.port != 443, do: ":#{conn.port}", else: ""
"#{conn.scheme}://#{dns_name}#{port}#{to}"
end
defp redirect(conn, to) do
conn
|> put_resp_header("location", to)
|> send_resp(302, "")
end
defp render_page(conn, page, info \\ []) do
info = [device_info: Backend.device_info()] ++ info
page
|> template_file()
|> EEx.eval_file(info, engine: Phoenix.HTML.Engine)
|> (fn {:safe, contents} -> send_resp(conn, 200, contents) end).()
end
defp render_password_page(conn, :wpa_psk, info) do
render_page(conn, "configure_password.html", info)
end
defp render_password_page(conn, :wpa_eap, info) do
render_page(conn, "configure_enterprise.html", info)
end
defp template_file(page) do
Application.app_dir(:vintage_net_wizard, ["priv", "templates", "#{page}.eex"])
end
defp password_error_message({:error, :password_required}), do: "Password required."
defp password_error_message({:error, :password_too_short}),
do: "Password is too short, must be greater than or equal to 8 characters."
defp password_error_message({:error, :password_too_long}),
do: "Password is too long, must be less than or equal to 64 characters."
defp password_error_message({:error, :invalid_characters}),
do: "Password as invalid characters double check you typed it correctly."
defp get_key_mgmt_from_ap(%{flags: []}) do
:none
end
defp get_key_mgmt_from_ap(%{flags: flags}) do
cond do
:wpa2_eap_ccmp in flags ->
:wpa_eap
:wpa2_psk_ccmp in flags ->
:wpa_psk
:wpa2_psk_ccmp_tkip in flags ->
:wpa_psk
:wpa_psk_ccmp_tkip in flags ->
:wpa_psk
true ->
:none
end
end
defp configuration_status_details() do
case status = Backend.configuration_status() do
:good ->
%{
value: status,
class: "text-success",
title: "Device successfully connected to a network in the applied configuration"
}
:bad ->
%{
value: status,
class: "text-danger",
title:
"Device was unable to connect to any network in the configuration due to bad password or a timeout while attempting."
}
_ ->
%{value: status, class: "text-warning", title: "Device waiting to be configured."}
end
end
end
| 25.92735 | 129 | 0.645294 |
ff0b76491f0854c473c665fe579ff9e7b0a6f258 | 125 | ex | Elixir | test/example_du.ex | X4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 58 | 2016-05-14T03:56:22.000Z | 2019-04-18T14:45:27.000Z | test/example_du.ex | X4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 5 | 2016-08-30T19:47:33.000Z | 2017-02-04T22:50:40.000Z | test/example_du.ex | x4lldux/disc_union | e5b5a2d775a0536fa492c311bc297f71b0a5980d | [
"MIT"
] | 2 | 2017-02-03T15:39:23.000Z | 2018-11-21T15:30:59.000Z | defmodule ExampleDU do
use DiscUnion, named_constructors: true
defunion Asd
| Qwe in any
| Rty in integer * atom
end
| 17.857143 | 41 | 0.736 |
ff0b85c376cf71af3b26efa20242be333460c33a | 1,500 | exs | Elixir | backend/mix.exs | hbobenicio/fullstack-phoenix-vue | a81804e263a3972df222a306c876340755014ef9 | [
"MIT"
] | null | null | null | backend/mix.exs | hbobenicio/fullstack-phoenix-vue | a81804e263a3972df222a306c876340755014ef9 | [
"MIT"
] | 1 | 2017-12-22T18:00:24.000Z | 2017-12-22T18:00:24.000Z | backend/mix.exs | hbobenicio/fullstack-phoenix-vue | a81804e263a3972df222a306c876340755014ef9 | [
"MIT"
] | null | null | null | defmodule Backend.Mixfile do
use Mix.Project
def project do
[
app: :backend,
version: "0.0.1",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Backend.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 25.423729 | 79 | 0.588667 |
ff0b8968cd5622ba71f52b01fea2cce26cda142a | 3,490 | ex | Elixir | lib/mv_opentelemetry/absinthe.ex | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 9 | 2021-10-05T08:01:18.000Z | 2022-03-30T07:19:38.000Z | lib/mv_opentelemetry/absinthe.ex | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 29 | 2021-10-04T05:46:47.000Z | 2022-03-31T18:47:10.000Z | lib/mv_opentelemetry/absinthe.ex | mindvalley/mv-opentelemetry | 90d654c023e6912f74c2cce518530878b46dd20b | [
"MIT"
] | 2 | 2021-10-04T05:50:38.000Z | 2021-10-05T05:01:34.000Z | defmodule MvOpentelemetry.Absinthe do
@moduledoc false
use MvOpentelemetry.SpanTracer,
name: :graphql,
events: [
[:absinthe, :execute, :operation, :start],
[:absinthe, :execute, :operation, :stop],
[:absinthe, :execute, :operation, :exception],
[:absinthe, :resolve, :field, :start],
[:absinthe, :resolve, :field, :stop],
[:absinthe, :resolve, :field, :exception]
]
@field_resolution_events [
[:absinthe, :resolve, :field, :start],
[:absinthe, :resolve, :field, :stop],
[:absinthe, :resolve, :field, :exception]
]
def register_tracer(opts) do
module_opts = __opts__()
prefix = Access.get(opts, :prefix, module_opts[:name])
name = Access.get(opts, :name, module_opts[:name])
tracer_id = :mv_opentelemetry
default_attributes = Access.get(opts, :default_attributes, [])
include_field_resolution = Access.get(opts, :include_field_resolution, false)
opts_with_defaults =
merge_defaults(opts,
prefix: prefix,
name: name,
tracer_id: tracer_id,
default_attributes: default_attributes
)
|> merge_default(:include_field_resolution, include_field_resolution)
events =
if include_field_resolution do
module_opts[:events]
else
module_opts[:events] -- @field_resolution_events
end
:telemetry.attach_many(
{name, __MODULE__},
events,
&__MODULE__.handle_event/4,
opts_with_defaults
)
end
@spec handle_event([atom()], map(), map(), Access.t()) :: :ok
# Starts a span for one field resolution. The span is named
# "<prefix>.resolve.field" with the field name appended when it is a
# string. Fix: the original bound `resolution = meta.resolution` twice
# (before and after computing the event name); the duplicate is removed.
def handle_event([:absinthe, :resolve, :field, :start], _measurements, meta, opts) do
  base_name = [opts[:prefix]] ++ [:resolve, :field]
  resolution = meta.resolution

  event_name =
    case resolution.definition.name do
      x when is_bitstring(x) ->
        Enum.join(base_name ++ [x], ".")

      _ ->
        Enum.join(base_name, ".")
    end

  attributes =
    [
      {"graphql.field.name", resolution.definition.name},
      {"graphql.field.schema", resolution.schema}
    ] ++ opts[:default_attributes]

  OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], event_name, meta, %{})
  |> Span.set_attributes(attributes)

  :ok
end
# Opens the span covering a whole GraphQL operation.
def handle_event([:absinthe, :execute, :operation, :start], _measurements, meta, opts) do
  span_name = Enum.join([opts[:prefix], :execute, :operation], ".")
  attributes = [{"graphql.operation.input", meta.blueprint.input} | opts[:default_attributes]]

  OpentelemetryTelemetry.start_telemetry_span(opts[:tracer_id], span_name, meta, %{})
  |> Span.set_attributes(attributes)

  :ok
end

# Closes the field span, recording the resolver's final state.
def handle_event([:absinthe, :resolve, :field, :stop], _measurements, meta, opts) do
  ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
  Span.set_attributes(ctx, [{"graphql.field.state", meta.resolution.state}])
  OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
  :ok
end

# Closes the operation span, recording the schema that served it.
def handle_event([:absinthe, :execute, :operation, :stop], _measurements, meta, opts) do
  ctx = OpentelemetryTelemetry.set_current_telemetry_span(opts[:tracer_id], meta)
  Span.set_attributes(ctx, [{"graphql.operation.schema", meta.blueprint.schema}])
  OpentelemetryTelemetry.end_telemetry_span(opts[:tracer_id], meta)
  :ok
end
end
| 30.347826 | 97 | 0.668481 |
ff0ba07b2a5622e387dceffd31ef3065273c6cc2 | 6,918 | ex | Elixir | lib/reverse_proxy_plug.ex | khoabeotv/reverse_proxy_plug | 9bea4b3c757c7e6ed52daefbf5f2521b313eee78 | [
"MIT"
] | null | null | null | lib/reverse_proxy_plug.ex | khoabeotv/reverse_proxy_plug | 9bea4b3c757c7e6ed52daefbf5f2521b313eee78 | [
"MIT"
] | null | null | null | lib/reverse_proxy_plug.ex | khoabeotv/reverse_proxy_plug | 9bea4b3c757c7e6ed52daefbf5f2521b313eee78 | [
"MIT"
] | null | null | null | defmodule ReverseProxyPlug do
@moduledoc """
The main ReverseProxyPlug module.
"""
alias Plug.Conn
@behaviour Plug
@http_client HTTPoison
@spec init(Keyword.t()) :: Keyword.t()
# Normalizes plug options. The :upstream URL is parsed and its non-nil
# components are merged into the options under plug-oriented names
# (:request_path, :query_string, :host, :port, :scheme).
#
# NOTE(review): URI.parse/1 returns a struct, so Map.to_list/1 also
# leaks a {:__struct__, URI} pair into the options; downstream code only
# reads known keys, so this appears harmless — confirm.
def init(opts) do
  upstream_parts =
    opts
    |> Keyword.get(:upstream, "")
    |> URI.parse()
    |> Map.to_list()
    # Drop URL components the upstream did not set (nil values).
    |> Enum.filter(fn {_, val} -> val end)
    |> keyword_rename(:path, :request_path)
    |> keyword_rename(:query, :query_string)

  opts
  |> Keyword.merge(upstream_parts)
  |> Keyword.put_new(:client, @http_client)
  |> Keyword.put_new(:client_options, [])
  |> Keyword.put_new(:response_mode, :stream)
end

@spec call(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
# Reads the client body, relays the request upstream, then writes the
# upstream response back to the client.
def call(conn, opts) do
  body = read_body(conn)
  conn |> request(body, opts) |> response(conn, opts)
end

# Issues the upstream HTTP request through the configured
# HTTPoison-compatible client (see @http_client).
def request(conn, body, opts) do
  {method, url, headers, client_options} = prepare_request(conn, opts)

  opts[:client].request(%HTTPoison.Request{
    method: method,
    url: url,
    body: body,
    headers: headers,
    options: client_options
  })
end
# Translates the upstream client result into a response on `conn`.
def response({:ok, resp}, conn, opts) do
  process_response(opts[:response_mode], conn, resp)
end

# Upstream failure: optionally notify the configured :error_callback,
# then answer with a gateway error status and an empty body.
def response(error, conn, opts) do
  if error_callback = opts[:error_callback] do
    error_callback.(error)
  end

  conn
  |> Conn.resp(status_from_error(error), "")
  |> Conn.send_resp()
end

# Timeouts map to 504; every other upstream failure maps to 502.
defp status_from_error({:error, %HTTPoison.Error{id: nil, reason: reason}})
     when reason in [:timeout, :connect_timeout] do
  :gateway_timeout
end

defp status_from_error(_any) do
  :bad_gateway
end

# Renames `old_key` to `new_key`, preserving its value.
#
# Fix: the previous put-then-delete implementation inserted
# `new_key: nil` when `old_key` was absent; a missing key now leaves the
# list untouched.
defp keyword_rename(keywords, old_key, new_key) do
  case Keyword.fetch(keywords, old_key) do
    {:ok, value} ->
      keywords
      |> Keyword.delete(old_key)
      |> Keyword.put(new_key, value)

    :error ->
      keywords
  end
end
# :stream mode ignores the response struct passed in: HTTPoison's async
# messages are consumed from the process mailbox in stream_response/1.
defp process_response(:stream, conn, _resp),
  do: stream_response(conn)

# :buffer mode has the full response in hand; relay status/headers/body.
defp process_response(:buffer, conn, %{status_code: status, body: body, headers: headers}) do
  resp_headers =
    headers
    |> normalize_headers

  conn
  |> Conn.prepend_resp_headers(resp_headers)
  |> Conn.resp(status, body)
end

@spec stream_response(Conn.t()) :: Conn.t()
# Receive-loop over HTTPoison's async messages, chunking the upstream
# body to the client as it arrives. Relies on HTTPoison delivering
# status, then headers, then chunks, then end — in that order.
defp stream_response(conn) do
  receive do
    %HTTPoison.AsyncStatus{code: code} ->
      conn
      |> Conn.put_status(code)
      |> stream_response

    %HTTPoison.AsyncHeaders{headers: headers} ->
      headers
      |> normalize_headers
      # |> Enum.reject(fn {header, _} -> header == "content-length" end)
      # The body length is unknown up front, so the response is chunked.
      |> Enum.concat([{"transfer-encoding", "chunked"}])
      |> Enum.reduce(conn, fn {header, value}, conn ->
        Conn.put_resp_header(conn, header, value)
      end)
      |> Conn.send_chunked(conn.status)
      |> stream_response

    %HTTPoison.AsyncChunk{chunk: chunk} ->
      case Conn.chunk(conn, chunk) do
        {:ok, conn} ->
          stream_response(conn)

        {:error, :closed} ->
          # Client went away; stop relaying.
          conn
      end

    %HTTPoison.AsyncEnd{} ->
      conn
  end
end
# Builds the upstream URL from the connection plus the parsed :upstream
# overrides computed in init/1.
defp prepare_url(conn, overrides) do
  keys = [:scheme, :host, :port, :query_string]

  overrides =
    if Keyword.has_key?(overrides, :host),
      do: overrides,
      else: Keyword.put(overrides, :host, "")

  # Connection values for the interesting keys, overridden by any
  # non-nil upstream components.
  x =
    conn
    |> Map.to_list()
    |> Enum.filter(fn {key, _} -> key in keys end)
    |> Keyword.merge(Enum.filter(overrides, fn {_, val} -> val end))

  request_path = Enum.join(conn.path_info, "/")
  request_path = Path.join(overrides[:request_path] || "/", request_path)

  # Path.join drops trailing slashes; restore one if the client sent it.
  request_path =
    if String.ends_with?(conn.request_path, "/"),
      do: request_path <> "/",
      else: request_path

  # NOTE(review): any query string CONTAINING the substring "insecure"
  # forces plain http upstream (this also matches e.g. "?q=insecure") —
  # confirm this downgrade switch is intentional.
  scheme = if String.contains?(x[:query_string], "insecure"), do: "http", else: x[:scheme]

  url =
    if x[:host] != "",
      do: "#{scheme}://#{x[:host]}:#{x[:port]}#{request_path}",
      else: "#{scheme}:/#{request_path}"

  case x[:query_string] do
    "" -> url
    query_string -> url <> "?" <> query_string
  end
end

# Derives {method, url, headers, client options} for the upstream call.
defp prepare_request(conn, options) do
  # Plug restricts methods to known HTTP verbs, so String.to_atom/1 is
  # bounded here — TODO confirm no custom verbs reach this plug.
  method = conn.method |> String.downcase() |> String.to_atom()
  url = prepare_url(conn, options)

  # NOTE(review): the user-agent header is replaced with a RANDOM value
  # from Faker on every request — confirm this spoofing is intentional.
  headers =
    conn.req_headers
    |> normalize_headers
    |> List.keyreplace("user-agent", 0, {"user-agent", Faker.Internet.UserAgent.user_agent})

  headers =
    if options[:preserve_host_header],
      do: headers,
      else: List.keyreplace(headers, "host", 0, {"host", host_header_from_url(url)})

  client_options =
    options[:response_mode]
    |> get_client_opts(options[:client_options])

  {method, url, headers, client_options}
end
# Streaming requests never time out and deliver HTTPoison's async
# messages to the calling process.
defp get_client_opts(:stream, opts) do
  opts
  |> put_default_timeouts()
  |> Keyword.put_new(:stream_to, self())
end

# Buffered requests only disable the timeouts.
defp get_client_opts(:buffer, opts) do
  put_default_timeouts(opts)
end

defp put_default_timeouts(opts) do
  opts
  |> Keyword.put_new(:timeout, :infinity)
  |> Keyword.put_new(:recv_timeout, :infinity)
end

# Header names are relayed with their original casing; only hop-by-hop
# headers (RFC 7230, section 6.1) are stripped before forwarding.
defp normalize_headers(headers) do
  remove_hop_by_hop_headers(headers)
end

defp remove_hop_by_hop_headers(headers) do
  hop_by_hop = ~w(
    te
    transfer-encoding
    trailer
    connection
    keep-alive
    proxy-authenticate
    proxy-authorization
    upgrade
  )

  Enum.reject(headers, fn {header, _value} -> String.downcase(header) in hop_by_hop end)
end
# Reads the client request body so it can be relayed upstream.
#
# * an already-read body cached by an earlier plug in
#   conn.assigns.raw_body is reused;
# * a body that fits in one read is returned as a binary;
# * a larger body is returned as {:stream, enum}, yielding chunks
#   lazily as they are read from the client.
def read_body(conn) do
  case Conn.read_body(conn) do
    {:ok, "", %{assigns: %{raw_body: raw_body}}} ->
      raw_body

    {:ok, body, _conn} ->
      body

    {:more, body, conn} ->
      {:stream,
       Stream.resource(
         fn -> {body, conn} end,
         fn
           # First call: emit the chunk we already read.
           {body, conn} ->
             {[body], conn}

           # Accumulator is nil after the final chunk: halt the stream.
           nil ->
             {:halt, nil}

           # Keep pulling chunks from the client.
           conn ->
             case Conn.read_body(conn) do
               {:ok, body, _conn} ->
                 {[body], nil}

               {:more, body, conn} ->
                 {[body], conn}
             end
         end,
         fn _ -> nil end
       )}
  end
end
# Computes the Host header value for the rewritten upstream URL,
# omitting the port when it is the scheme's default.
defp host_header_from_url(url) when is_binary(url) do
  url |> URI.parse() |> host_header_from_url()
end

defp host_header_from_url(%URI{host: host, port: nil}), do: host
defp host_header_from_url(%URI{host: host, port: 80, scheme: "http"}), do: host
defp host_header_from_url(%URI{host: host, port: 443, scheme: "https"}), do: host

defp host_header_from_url(%URI{host: host, port: port, scheme: scheme})
     when scheme in ["http", "https"] do
  "#{host}:#{port}"
end
end
| 25.156364 | 99 | 0.593235 |
ff0ba6614e3fac5f8c3a9696bb386f0abd323fb2 | 2,739 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/export_assets_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/export_assets_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/export_assets_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.ExportAssetsRequest do
@moduledoc """
Export asset request.
## Attributes
* `assetTypes` (*type:* `list(String.t)`, *default:* `nil`) - A list of asset types of which to take a snapshot for. Example:
"compute.googleapis.com/Disk". If specified, only matching assets will be
returned. See [Introduction to Cloud Asset
Inventory](https://cloud.google.com/asset-inventory/docs/overview)
for all supported asset types.
* `contentType` (*type:* `String.t`, *default:* `nil`) - Asset content type. If not specified, no content but the asset name will be
returned.
* `outputConfig` (*type:* `GoogleApi.CloudAsset.V1.Model.OutputConfig.t`, *default:* `nil`) - Required. Output configuration indicating where the results will be output to.
* `readTime` (*type:* `DateTime.t`, *default:* `nil`) - Timestamp to take an asset snapshot. This can only be set to a timestamp
between the current time and the current time minus 35 days (inclusive).
If not specified, the current time will be used. Due to delays in resource
data collection and indexing, there is a volatile window during which
running the same query may get different results.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:assetTypes => list(String.t()),
:contentType => String.t(),
:outputConfig => GoogleApi.CloudAsset.V1.Model.OutputConfig.t(),
:readTime => DateTime.t()
}
field(:assetTypes, type: :list)
field(:contentType)
field(:outputConfig, as: GoogleApi.CloudAsset.V1.Model.OutputConfig)
field(:readTime, as: DateTime)
end
# Decodes JSON into the struct via the generated ModelBase helpers.
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.ExportAssetsRequest do
  def decode(value, options) do
    GoogleApi.CloudAsset.V1.Model.ExportAssetsRequest.decode(value, options)
  end
end

# Encodes the struct to JSON via the shared Gax encoder.
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.ExportAssetsRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.138462 | 176 | 0.721066 |
ff0bc93b23f3abe3b20fb1adc72a9135484174a4 | 390 | exs | Elixir | config/test.exs | sreecodeslayer/exfile-imagemagick | 0103f8585901e295cc992a121c248a2d5e5eae7c | [
"MIT"
] | 5 | 2015-12-25T18:17:10.000Z | 2018-07-17T10:19:10.000Z | config/test.exs | sreecodeslayer/exfile-imagemagick | 0103f8585901e295cc992a121c248a2d5e5eae7c | [
"MIT"
] | 7 | 2016-08-14T02:28:03.000Z | 2017-11-04T23:07:15.000Z | config/test.exs | sreecodeslayer/exfile-imagemagick | 0103f8585901e295cc992a121c248a2d5e5eae7c | [
"MIT"
] | 9 | 2016-08-22T11:06:04.000Z | 2021-09-30T09:05:03.000Z | use Mix.Config
# You can configure the image_processor to be one of :imagemagick or :graphicsmagick
# The default image processor is :imagemagick
#
# In case of imagemagick make sure that `convert` executable is in your environment's path
# In case of graphicsmagick make sure that the `gm` executable is in your environment's path
#
config :exfile_imagemagick, image_processor: :imagemagick
| 35.454545 | 90 | 0.794872 |
ff0bf060a779f576bd7f4baf4d61dd95be16417b | 7,607 | ex | Elixir | lib/mongo_auth.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | lib/mongo_auth.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | lib/mongo_auth.ex | LINKHA/elixir-mongo | f2e567675e2d9bdace0e6893e1e9bd424980ba75 | [
"MIT"
] | null | null | null | defmodule Mongo.Auth do
# Authenticates against MongoDB, dispatching to the mechanism module
# appropriate for the server's wire protocol version.
def auth(opts, mongo) do
  mechanism(mongo).auth(opts[:username], opts[:password], mongo)
end

@doc false
# Wire version 3+ servers speak SCRAM-SHA-1; older ones use MONGODB-CR.
defp mechanism(%Mongo.Server{wire_version: version}) when version >= 3, do: Mongo.Auth.SCRAM
defp mechanism(_), do: Mongo.Auth.CR
end
defmodule Mongo.Auth.CR do
  @moduledoc false
  # Legacy MONGODB-CR challenge/response authentication (pre-3.0 servers).

  # Runs the nonce + authenticate exchange. Returns {:ok, mongo} on
  # success, or the server/transport error otherwise.
  def auth(username, password, mongo) do
    nonce = getnonce(mongo)
    hash_password = hash(username <> ":mongo:" <> password)

    Mongo.Server.cmd_sync(mongo, %{
      authenticate: 1,
      nonce: nonce,
      user: username,
      key: hash(nonce <> username <> hash_password)
    })
    |> case do
      {:ok, resp} ->
        case Mongo.Response.success(resp) do
          :ok -> {:ok, mongo}
          error -> error
        end

      error ->
        error
    end
  end

  # Fetches the server-issued nonce for the handshake.
  defp getnonce(mongo) do
    case Mongo.Server.cmd_sync(mongo, %{getnonce: true}) do
      {:ok, resp} -> resp |> Mongo.Response.getnonce()
      error -> error
    end
  end

  # Lowercase-hex MD5 digest. Fix: uses Base.encode16/2 instead of the
  # previous hand-rolled nibble-by-nibble hex encoder; this also matches
  # how Mongo.Auth.SCRAM.digest_password/2 builds the same digest.
  defp hash(data) do
    :crypto.hash(:md5, data) |> Base.encode16(case: :lower)
  end
end
defmodule Mongo.Auth.SCRAM do
use Bitwise
# Runs the three-leg SCRAM-SHA-1 conversation (RFC 5802) against the
# server. Returns {:ok, mongo} on success or {:error, %Mongo.Error{}}
# when the server rejects a step; transport errors fall through as-is.
def auth(username, password, mongo) do
  nonce = nonce()
  first_bare = first_bare(username, nonce)
  payload = first_message(first_bare)
  message = [saslStart: 1, mechanism: "SCRAM-SHA-1", payload: payload]

  with {:ok, reply} <- command(message, mongo),
       {message, signature} = first(reply, first_bare, username, password, nonce),
       {:ok, reply} <- command(message, mongo),
       message = second(reply, signature),
       {:ok, reply} <- command(message, mongo),
       :ok <- final(reply) do
    {:ok, mongo}
  else
    {:ok, %{ok: z, errmsg: reason, code: code}} when z == 0 ->
      {:error, %Mongo.Error{msg: "auth failed for user:#{username} code: #{code} reason:#{reason}"}}

    error ->
      error
  end
end

# Sends one SASL command and unwraps the reply. A reply whose ok flag is
# not 1 falls through untouched so auth/3's else block can classify it.
defp command(cmd, mongo) do
  case Mongo.Server.cmd_sync(mongo, cmd) do
    {:ok, resp} ->
      case Mongo.Response.cmd(resp) do
        {:ok, %{ok: ok} = reply} when ok == 1 -> {:ok, reply}
        error -> error
      end

    error ->
      error
  end
end
# Handles the server-first message: derives the salted password,
# verifies the server echoed our nonce, and builds the client-final
# message together with the server signature we expect back.
defp first(%{conversationId: 1, payload: server_payload, done: false},
       first_bare, username, password, client_nonce) do
  params = parse_payload(server_payload)
  server_nonce = params["r"]
  salt = params["s"] |> Base.decode64!
  iter = params["i"] |> String.to_integer
  pass = digest_password(username, password)
  salted_password = hi(pass, salt, iter)

  # The server nonce must start with the 24-byte nonce we sent.
  <<^client_nonce::binary-size(24), _::binary>> = server_nonce

  client_message = "c=biws,r=#{server_nonce}"
  auth_message = "#{first_bare},#{server_payload.bin},#{client_message}"
  server_signature = generate_signature(salted_password, auth_message)
  proof = generate_proof(salted_password, auth_message)
  client_final_message = %Bson.Bin{bin: "#{client_message},#{proof}"}
  message = [saslContinue: 1, conversationId: 1, payload: client_final_message]
  {message, server_signature}
end

# Verifies the server's signature (the pin match on ^signature raises on
# mismatch) and builds the empty final client message.
defp second(%{conversationId: 1, payload: payload, done: false}, signature) do
  params = parse_payload(payload)
  ^signature = params["v"] |> Base.decode64!
  [saslContinue: 1, conversationId: 1, payload: %Bson.Bin{bin: ""}]
end

defp final(%{conversationId: 1, payload: %Bson.Bin{bin: ""}, done: true}), do: :ok
defp final(_), do: :failed

# GS2 header "n,," (no channel binding) + the bare first message.
defp first_message(first_bare) do
  %Bson.Bin{bin: "n,,#{first_bare}"}
end

defp first_bare(username, nonce) do
  "n=#{encode_username(username)},r=#{nonce}"
end

# Hi() from RFC 5802: PBKDF2 with HMAC-SHA1, 20-byte output.
defp hi(password, salt, iterations) do
  Mongo.PBKDF2.generate(password, salt,
    iterations: iterations, length: 20, digest: :sha)
end
# Builds the client proof "p=<base64>" per RFC 5802:
# ClientProof := ClientKey XOR HMAC(H(ClientKey), AuthMessage).
# Fix: uses :crypto.exor/2 (the same primitive Mongo.PBKDF2 already
# uses) instead of the previous hand-rolled byte-by-byte XOR loop; both
# inputs are SHA-1 HMACs and therefore equal-length.
defp generate_proof(salted_password, auth_message) do
  client_key = :crypto.hmac(:sha, salted_password, "Client Key")
  stored_key = :crypto.hash(:sha, client_key)
  signature = :crypto.hmac(:sha, stored_key, auth_message)
  client_proof = :crypto.exor(client_key, signature)
  "p=#{Base.encode64(client_proof)}"
end

# ServerSignature := HMAC(ServerKey, AuthMessage); compared in second/2.
defp generate_signature(salted_password, auth_message) do
  server_key = :crypto.hmac(:sha, salted_password, "Server Key")
  :crypto.hmac(:sha, server_key, auth_message)
end
# 18 random bytes -> 24-character base64 client nonce.
defp nonce do
  Base.encode64(:crypto.strong_rand_bytes(18))
end

# Escapes "=" and "," in the username as required by RFC 5802.
defp encode_username(username) do
  username
  |> String.graphemes()
  |> Enum.map_join(fn
    "=" -> "=3D"
    "," -> "=2C"
    other -> other
  end)
end

# "k1=v1,k2=v2" -> %{"k1" => "v1", "k2" => "v2"}; values may themselves
# contain "=" (split limited to two parts).
defp parse_payload(%Bson.Bin{subtype: 0, bin: payload}) do
  payload
  |> String.split(",")
  |> Map.new(fn pair ->
    [key, value] = String.split(pair, "=", parts: 2)
    {key, value}
  end)
end

# MongoDB credential hash: lowercase hex MD5 of "user:mongo:password".
defp digest_password(username, password) do
  :md5
  |> :crypto.hash([username, ":mongo:", password])
  |> Base.encode16(case: :lower)
end
end
defmodule Mongo.PBKDF2 do
# From https://github.com/elixir-lang/plug/blob/ef616a9db9c87ec392dd8a0949bc52fafcf37005/lib/plug/crypto/key_generator.ex
# with modifications
@moduledoc """
`PBKDF2` implements PBKDF2 (Password-Based Key Derivation Function 2),
part of PKCS #5 v2.0 (Password-Based Cryptography Specification).
It can be used to derive a number of keys for various purposes from a given
secret. This lets applications have a single secure secret, but avoid reusing
that key in multiple incompatible contexts.
see http://tools.ietf.org/html/rfc2898#section-5.2
"""
use Bitwise
@max_length bsl(1, 32) - 1
@doc """
Returns a derived key suitable for use.
## Options
* `:iterations` - defaults to 1000 (increase to at least 2^16 if used for passwords);
* `:length` - a length in octets for the derived key. Defaults to 32;
* `:digest` - an hmac function to use as the pseudo-random function. Defaults to `:sha256`;
"""
# Derives a key of `:length` bytes from `secret` and `salt`.
#
# Options: :iterations (default 1000), :length (default 32),
# :digest (default :sha256). Raises ArgumentError when :length exceeds
# the RFC 2898 maximum of (2^32 - 1).
def generate(secret, salt, opts \\ []) do
  iterations = Keyword.get(opts, :iterations, 1000)
  length = Keyword.get(opts, :length, 32)
  digest = Keyword.get(opts, :digest, :sha256)

  if length > @max_length do
    raise ArgumentError, "length must be less than or equal to #{@max_length}"
  else
    generate(mac_fun(digest, secret), salt, iterations, length, 1, [], 0)
  end
end

# Enough blocks produced: concatenate and truncate to the requested size.
defp generate(_fun, _salt, _iterations, max_length, _block_index, acc, length)
     when length >= max_length do
  key = acc |> Enum.reverse |> IO.iodata_to_binary
  <<bin::binary-size(max_length), _::binary>> = key
  bin
end

# Produce block T_i = F(salt, iterations, i), prepend it, and continue.
defp generate(fun, salt, iterations, max_length, block_index, acc, length) do
  initial = fun.(<<salt::binary, block_index::integer-size(32)>>)
  block = iterate(fun, iterations - 1, initial, initial)
  generate(fun, salt, iterations, max_length, block_index + 1,
    [block | acc], byte_size(block) + length)
end

# Folds U_1 xor U_2 xor ... xor U_c for one block.
defp iterate(_fun, 0, _prev, acc), do: acc

defp iterate(fun, iteration, prev, acc) do
  next = fun.(prev)
  iterate(fun, iteration - 1, next, :crypto.exor(next, acc))
end

# NOTE(review): :crypto.hmac/3 was removed in OTP 24 (:crypto.mac/4 is
# the replacement) — confirm the supported OTP range for this library.
defp mac_fun(digest, secret) do
  &:crypto.hmac(digest, secret, &1)
end
end
| 32.508547 | 123 | 0.636913 |
ff0bfb878a4ef8b3e968be31977c9886c75e9b26 | 2,058 | ex | Elixir | lib/new_relic/util/http.ex | lenfree/elixir_agent | 9ef75b16461e28d19852904fbd001587df75cc19 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/util/http.ex | lenfree/elixir_agent | 9ef75b16461e28d19852904fbd001587df75cc19 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/util/http.ex | lenfree/elixir_agent | 9ef75b16461e28d19852904fbd001587df75cc19 | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Util.HTTP do
@moduledoc false
@gzip {'content-encoding', 'gzip'}
# POSTs a gzipped JSON payload via :httpc. Returns
# {:ok, %{status_code, body}} or the :httpc error tuple.
def post(url, body, headers) when is_binary(body) do
  headers = [@gzip | Enum.map(headers, fn {k, v} -> {'#{k}', '#{v}'} end)]
  request = {'#{url}', headers, 'application/json', :zlib.gzip(body)}

  # Optional proxy support (workaround until
  # https://github.com/newrelic/elixir_agent/issues/304 is addressed):
  #   config :new_relic_agent,
  #     proxy_url: "http://localhost:3128",
  #     no_proxy: "localhost,test"   # comma-separated string
  case Application.get_env(:new_relic_agent, :proxy_url) do
    nil ->
      :ok

    proxy_url ->
      %{host: host, port: port} = URI.parse(proxy_url)

      # Fix: the previous code wrapped the mapped hosts in an extra list
      # ([["localhost", ...]]) which :httpc does not understand, and an
      # empty :no_proxy produced [''] — trim: true avoids both.
      no_proxy_list =
        :new_relic_agent
        |> Application.get_env(:no_proxy, "")
        |> String.split(",", trim: true)
        |> Enum.map(&String.to_charlist/1)

      :httpc.set_options(proxy: {{String.to_charlist(host), port}, no_proxy_list})
  end

  with {:ok, {{_, status_code, _}, _headers, body}} <-
         :httpc.request(:post, request, http_options(), []) do
    {:ok, %{status_code: status_code, body: to_string(body)}}
  end
end

# JSON-encodes `body` first; on encoder failure, logs and returns the
# error instead of raising.
def post(url, body, headers) do
  case Jason.encode(body) do
    {:ok, encoded} ->
      post(url, encoded, headers)

    {:error, message} ->
      # Fix: interpolating a map/list with "#{body}" raises
      # Protocol.UndefinedError; inspect/1 renders any term safely.
      NewRelic.log(:debug, "Unable to JSON encode: #{inspect(body)}")
      {:error, message}
  end
end
@doc """
Certs are pulled from Mozilla exactly as Hex does:
https://github.com/hexpm/hex/blob/master/README.md#bundled-ca-certs
SSL configured according to EEF Security guide:
https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/ssl
"""
def http_options() do
  [
    # Fail fast when the collector endpoint is unreachable.
    connect_timeout: 1000,
    ssl: [
      verify: :verify_peer,
      cacertfile: Application.app_dir(:new_relic_agent, "priv/cacert.pem"),
      # Enables wildcard-aware hostname verification for HTTPS.
      customize_hostname_check: [
        match_fun: :public_key.pkix_verify_hostname_match_fun(:https)
      ]
    ]
  ]
end
| 28.985915 | 96 | 0.617104 |
ff0c5c3ee33b6240b09f6a34da962bcb034bd6d2 | 2,014 | ex | Elixir | lib/vintage_net_wifi/wps_data.ex | pojiro/vintage_net_wifi | 1033585be6c4a8a65e6085e99b9f470711ceec1c | [
"Apache-2.0"
] | 20 | 2019-12-03T18:26:25.000Z | 2022-02-08T09:27:23.000Z | lib/vintage_net_wifi/wps_data.ex | pojiro/vintage_net_wifi | 1033585be6c4a8a65e6085e99b9f470711ceec1c | [
"Apache-2.0"
] | 29 | 2020-01-21T02:21:47.000Z | 2022-02-16T13:02:24.000Z | lib/vintage_net_wifi/wps_data.ex | pojiro/vintage_net_wifi | 1033585be6c4a8a65e6085e99b9f470711ceec1c | [
"Apache-2.0"
] | 12 | 2020-02-16T16:48:03.000Z | 2022-03-24T12:10:52.000Z | defmodule VintageNetWiFi.WPSData do
@moduledoc """
Utilities for handling WPS data
"""
@typedoc """
A map containing WPS data
All keys are optional. Known keys use atoms. Unknown keys use their numeric
value and their value is left as a raw binary.
Known keys:
* `:credential` - a map of WiFi credentials (also WPS data)
* `:mac_address` - a MAC address in string form (i.e., `"aa:bb:cc:dd:ee:ff"`)
* `:network_key` - a passphrase or PSK
* `:network_index` - the key index
"""
@type t() :: %{
optional(:credential) => t(),
optional(:mac_address) => binary(),
optional(:network_key) => binary(),
optional(:network_index) => non_neg_integer(),
optional(0..65536) => binary()
}
@doc """
Decode WPS data
The WPS data is expected to be in hex string form like what the
wpa_supplicant reports.
"""
@spec decode(binary) :: {:ok, t()} | :error
def decode(hex_string) when is_binary(hex_string) do
  case Base.decode16(hex_string, case: :mixed) do
    {:ok, raw_bytes} -> decode_all_tlv(raw_bytes, %{})
    :error -> :error
  end
end

# Walks the TLV stream, accumulating decoded pairs; any malformed tail
# (or a nested :error) aborts the whole decode.
defp decode_all_tlv(<<>>, acc), do: {:ok, acc}

defp decode_all_tlv(<<tag::16, len::16, value::binary-size(len), rest::binary>>, acc) do
  with {key, decoded} <- decode_tlv(tag, value) do
    decode_all_tlv(rest, Map.put(acc, key, decoded))
  end
end

defp decode_all_tlv(_malformed, _acc), do: :error

# Credential blocks nest a full TLV stream of their own.
defp decode_tlv(0x100E, value) do
  with {:ok, decoded} <- decode_all_tlv(value, %{}) do
    {:credential, decoded}
  end
end

defp decode_tlv(0x1045, value), do: {:ssid, value}
defp decode_tlv(0x1027, value), do: {:network_key, value}

defp decode_tlv(0x1020, <<value::binary-size(6)>>) do
  # Render the six MAC bytes as colon-separated uppercase hex pairs.
  mac =
    for <<byte <- value>> do
      byte
      |> Integer.to_string(16)
      |> String.pad_leading(2, "0")
    end
    |> Enum.join(":")

  {:mac_address, mac}
end

defp decode_tlv(0x1026, <<n>>), do: {:network_index, n}

# Unknown tags are kept raw under their numeric key.
defp decode_tlv(tag, value), do: {tag, value}
end
| 27.589041 | 93 | 0.625621 |
ff0c61de487a3be59c1c813f6ebbf3c379367400 | 540 | ex | Elixir | lib/ezpdf/cli.ex | samullen/ezpdf | 1b0699e340a31a20f7b867e462d782c5f3c254d8 | [
"Unlicense"
] | null | null | null | lib/ezpdf/cli.ex | samullen/ezpdf | 1b0699e340a31a20f7b867e462d782c5f3c254d8 | [
"Unlicense"
] | null | null | null | lib/ezpdf/cli.ex | samullen/ezpdf | 1b0699e340a31a20f7b867e462d782c5f3c254d8 | [
"Unlicense"
] | null | null | null | defmodule EZPDF.CLI do
# Entry point: parse argv into a config value, then dispatch.
def main(argv) do
  argv
  |> EZPDF.Config.setup()
  |> process()
end

# Prints usage when option parsing requested help.
def process(:help) do
  IO.puts """
  Usage: ezpdf [options] [markdown file]
  -h, --help # Usage help
  -H, --header file # html file to use as the header
  -f, --footer file # html file to use as the footer
  -o, --output file # output PDF to file (default: ./output.pdf)
  """
end

# Renders the given markdown file to PDF.
def process(input_path) do
  EZPDF.Processor.process(input_path)
end
end
| 22.5 | 76 | 0.557407 |
ff0c63c6036405f8c1d17b99d46fd8080d6c995b | 714 | ex | Elixir | lib/githubist_web/gettext.ex | alpcanaydin/githubist-api | 6481f8177c5b8573da2d5df52ffaff41340b25d0 | [
"MIT"
] | 33 | 2018-10-13T16:40:36.000Z | 2021-05-23T14:13:34.000Z | lib/githubist_web/gettext.ex | 5l1v3r1/githubist-api | 6481f8177c5b8573da2d5df52ffaff41340b25d0 | [
"MIT"
] | 1 | 2018-12-23T19:59:05.000Z | 2018-12-24T18:08:00.000Z | lib/githubist_web/gettext.ex | 5l1v3r1/githubist-api | 6481f8177c5b8573da2d5df52ffaff41340b25d0 | [
"MIT"
] | 3 | 2018-10-13T22:18:38.000Z | 2020-03-29T23:41:23.000Z | defmodule GithubistWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import GithubistWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :githubist
end
| 28.56 | 72 | 0.683473 |
ff0c7cd56cfbbbe594fb622468389ea3def97bbe | 1,041 | exs | Elixir | clients/keto/elixir/mix.exs | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/keto/elixir/mix.exs | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/keto/elixir/mix.exs | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | defmodule Keto.Mixfile do
use Mix.Project
def project do
  [app: :ory_keto,
   version: "0.8.0-alpha.0.pre.1",
   elixir: "~> 1.6",
   build_embedded: Mix.env == :prod,
   start_permanent: Mix.env == :prod,
   package: package(),
   # NOTE(review): this description mentions Ory Oathkeeper, but the
   # package is ory_keto — looks like a copy/paste slip in the SDK
   # generator; confirm and correct upstream.
   description: "Documentation for all of Ory Oathkeeper's APIs. ",
   deps: deps()]
end

# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
  # Specify extra applications you'll use from Erlang/Elixir
  [extra_applications: [:logger]]
end

# Dependencies can be Hex packages:
#
#   {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
#   {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.3.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
  [
    {:tesla, "~> 1.2"},
    {:poison, "~> 3.0"}
  ]
end

defp package() do
  [
    name: "ory_keto",
    files: ~w(lib mix.exs README* LICENSE*),
    # NOTE(review): Hex rejects empty license strings; this should list
    # a real SPDX identifier — confirm the intended license upstream.
    licenses: [""]
  ]
end
end
| 22.148936 | 79 | 0.591739 |
ff0cb22ed3ab75fcfe0ee0645dcb760476bcb538 | 70 | exs | Elixir | test/test_helper.exs | mehmetboraezer/elephant | 2e23af1b632fbef0a1371acd3ae27bd063e1212e | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | mehmetboraezer/elephant | 2e23af1b632fbef0a1371acd3ae27bd063e1212e | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | mehmetboraezer/elephant | 2e23af1b632fbef0a1371acd3ae27bd063e1212e | [
"Apache-2.0"
] | null | null | null | ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(Elephant.Repo, :manual)
| 14 | 54 | 0.785714 |
ff0d1ddd1cc07c9a931f28f38aa5c1250eeb4f7c | 3,310 | ex | Elixir | lib/nonuniform_rand/alias_method.ex | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | lib/nonuniform_rand/alias_method.ex | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | lib/nonuniform_rand/alias_method.ex | SergeTupchiy/nonuniform_rand | c6ddd4be2b6c06707ca54f4a98cdbe401e4968b2 | [
"MIT"
] | null | null | null | defmodule NonuniformRand.AliasMethod do
@moduledoc """
Implementation is based on the following sources:
http://www.keithschwarz.com/interesting/code/?dir=alias-method
http://www.keithschwarz.com/darts-dice-coins/
"""
@type probs_list :: %{required(any) => float()} | [{any(), float()}]
@type alias_tables :: {%{non_neg_integer() => {any(), number()}},
%{non_neg_integer() => any()}}
@spec init_prob_alias_tables(probs_list()) :: alias_tables()
# Builds the probability/alias tables (Vose's alias method) from a list
# or map of {value, probability} pairs. Probabilities are assumed to
# sum to 1.0 — TODO confirm callers normalize their inputs.
def init_prob_alias_tables([]), do: raise(ArgumentError, "Empty probabilities list")

def init_prob_alias_tables(probs) when map_size(probs)==0 do
  raise(ArgumentError, "Empty probabilities map")
end

def init_prob_alias_tables(probs_list) do
  size = get_size(probs_list)
  # Each of the `size` columns represents probability mass 1/size.
  avg = 1.0 / size
  rekeyed_probs = rekey_map(probs_list)

  split_to_small_large(rekeyed_probs, avg)
  |> fill_prob_alias_tables(%{}, %{}, rekeyed_probs, avg, size)
end
@spec next_random(alias_tables()) :: any()
# Draws one value: pick a uniform column, then a biased coin decides
# between the column's own value and its alias.
def next_random({prob_table, aliases}) do
  column = Enum.random(0..(map_size(prob_table) - 1))
  {value, prob} = Map.get(prob_table, column)

  if :rand.uniform() < prob do
    value
  else
    Map.get(aliases, column)
  end
end
defp get_size(collection) when is_map(collection), do: map_size(collection)
defp get_size(collection) when is_list(collection), do: length(collection)

# Re-keys the {value, prob} pairs under consecutive integer indices so
# columns can be drawn uniformly.
defp rekey_map(probs_list) do
  probs_list
  |> Enum.with_index()
  |> Map.new(fn {{value, prob}, index} -> {index, {value, prob}} end)
end

# Partitions column indices into under-full ("small", prob < avg) and
# over-full ("large", prob >= avg) worklists.
defp split_to_small_large(rekeyed_probs, avg) do
  Enum.reduce(rekeyed_probs, {[], []}, fn {index, {_value, prob}}, {small, large} ->
    if prob >= avg do
      {small, [index | large]}
    else
      {[index | small], large}
    end
  end)
end
# Termination: once either worklist empties, the remaining columns are
# set to probability 1 (they are "full" within rounding error).
defp fill_prob_alias_tables({[], large}, prob, aliases, orig_prob, _avg, _size) do
  final_prob = clear_rem_list(large, prob, orig_prob)
  {final_prob, aliases}
end

defp fill_prob_alias_tables({small, []}, prob, aliases, orig_prob, _avg, _size) do
  final_prob = clear_rem_list(small, prob, orig_prob)
  {final_prob, aliases}
end

# Pair one under-full ("small") column with one over-full ("large")
# donor: the small column keeps its probability (scaled by `size` so the
# threshold is uniform on [0, 1)) and aliases to the donor's value; the
# donor's remaining mass decides which worklist it rejoins.
defp fill_prob_alias_tables({[small | rem_small], [large | rem_large]},
       prob, aliases, orig_prob, avg, size) do
  {large_val, prob_large} = Map.get(orig_prob, large)
  {small_val, prob_small} = Map.get(orig_prob, small)
  new_prob = Map.put(prob, small, {small_val, prob_small * size})
  new_alias = Map.put(aliases, small, large_val)
  # Mass the donor gave away: it filled (avg - prob_small) of the column.
  rem_prob_large = (prob_large + prob_small) - avg
  new_orig_prob = Map.put(orig_prob, large, {large_val, rem_prob_large})

  if rem_prob_large >= avg do
    fill_prob_alias_tables({rem_small, [large | rem_large]},
      new_prob, new_alias, new_orig_prob, avg, size)
  else
    fill_prob_alias_tables({[large | rem_small], rem_large},
      new_prob, new_alias, new_orig_prob, avg, size)
  end
end
# Marks every leftover column as a certain hit (probability 1).
# NOTE(review): declared with `def` although it looks internal (its
# siblings are defp); kept public to preserve the module's interface.
def clear_rem_list([], prob_table, _orig_prob), do: prob_table

def clear_rem_list([index | rest], prob_table, orig_prob) do
  {value, _prob} = Map.get(orig_prob, index)
  clear_rem_list(rest, Map.put(prob_table, index, {value, 1}), orig_prob)
end
end
| 35.978261 | 95 | 0.683686 |
ff0d26486fb3cf939bd235c080259bafc393b1e8 | 8,264 | ex | Elixir | lib/phoenix_live_view/utils.ex | tiagoefmoraes/phoenix_live_view | ed77fa52443c77bd9f281f1a64da832a6e8724e8 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/utils.ex | tiagoefmoraes/phoenix_live_view | ed77fa52443c77bd9f281f1a64da832a6e8724e8 | [
"MIT"
] | null | null | null | lib/phoenix_live_view/utils.ex | tiagoefmoraes/phoenix_live_view | ed77fa52443c77bd9f281f1a64da832a6e8724e8 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.Utils do
# Shared helpers used mostly by Channel and Diff,
# but also Static, Flash, and LiveViewTest.
@moduledoc false
alias Phoenix.LiveView
alias Phoenix.LiveView.Socket
# All available mount options
@mount_opts [:temporary_assigns, :layout]
@doc """
Resets change tracking on the socket.

Empties the `:changed` map and re-applies any configured temporary assigns
over the current assigns.
"""
def clear_changed(%Socket{} = socket) do
  temp = Map.get(socket.private, :temporary_assigns, %{})
  %Socket{socket | changed: %{}, assigns: Map.merge(socket.assigns, temp)}
end
@doc """
Returns whether the socket has tracked changes (arity 1), or whether the
given assign key is among the tracked changes (arity 2).
"""
def changed?(%Socket{changed: tracked}), do: tracked != %{}

def changed?(%Socket{changed: tracked}, key) when is_map(tracked),
  do: Map.has_key?(tracked, key)

def changed?(%Socket{}, _key), do: false
@doc """
Stores the private map on the socket, generating a random DOM-safe id when
the socket does not have one yet.
"""
def configure_socket(%{id: nil} = socket, private) do
  %{socket | id: random_id(), private: private}
end

def configure_socket(socket, private), do: %{socket | private: private}
@doc """
Returns a random "phx-" prefixed ID containing only valid DOM tokens.

Base64's `/` and `+` are rewritten to `-` so the id is safe in selectors.
"""
def random_id do
  String.replace("phx-" <> random_encoded_bytes(), ["/", "+"], "-")
end
@doc """
Drops data that is only needed while mounting (connect params and the
assigned-new bookkeeping) and resets change tracking.
"""
def post_mount_prune(%Socket{} = socket) do
  drop_private(clear_changed(socket), [:connect_params, :assigned_new])
end
@doc """
Renders the view (through its layout, when configured) with the socket's
assigns and returns the resulting `%Phoenix.LiveView.Rendered{}` struct.

Raises a `RuntimeError` if `render/1` returns anything else, which happens
when the template was not compiled as `.leex`/`~L`.
"""
def to_rendered(socket, view) do
  # The socket and the view module are exposed to the template as assigns.
  assigns = Map.merge(socket.assigns, %{socket: socket, live_view_module: view})
  case render_view(socket, view, assigns) do
    %LiveView.Rendered{} = rendered ->
      rendered
    other ->
      raise RuntimeError, """
      expected #{inspect(view)}.render/1 to return a %Phoenix.LiveView.Rendered{} struct
      Ensure your render function uses ~L, or your eex template uses the .leex extension.
      Got:
      #{inspect(other)}
      """
  end
end
@doc """
Returns the flash map stored in the socket's private data, or `nil` when no
flash has been put.
"""
def get_flash(%Socket{private: private}), do: private[:flash]
@doc """
Stores a flash message of the given `kind` in the socket, creating the
flash map on first use.
"""
def put_flash(%Socket{} = socket, kind, msg) when is_atom(kind) and is_binary(msg) do
  flash = socket.private |> Map.get(:flash, %{}) |> Map.put(kind, msg)
  %Socket{socket | private: Map.put(socket.private, :flash, flash)}
end
@doc """
Signs the given flash map into a token using the endpoint's signing salt.
"""
def sign_flash(%Socket{} = socket, %{} = flash) do
  endpoint = socket.endpoint
  LiveView.Flash.sign(endpoint, salt!(endpoint), flash)
end
@doc """
Returns the configured LiveView signing salt for the endpoint.

Raises an `ArgumentError` with a copy-pasteable config snippet (including a
freshly generated salt) when `live_view: [signing_salt: ...]` is missing.
"""
def salt!(endpoint) when is_atom(endpoint) do
  endpoint.config(:live_view)[:signing_salt] ||
    raise ArgumentError, """
    no signing salt found for #{inspect(endpoint)}.
    Add the following LiveView configuration to your config/config.exs:
    config :my_app, MyAppWeb.Endpoint,
    ...,
    live_view: [signing_salt: "#{random_encoded_bytes()}"]
    """
end
@doc """
Returns the matched LiveView route info for `uri`.

Returns `{:internal, params, parsed_uri}` when the URI routes to the given
`view` through the router's `live/3` macro (params are the query params
merged with — and overridden by — the path params), or `:external` when it
routes elsewhere.

Raises when the view was not mounted through a router (`nil` router) or
when the URI matches no route at all.
"""
def live_link_info!(nil, view, _uri) do
  raise ArgumentError,
    "cannot invoke handle_params/3 on #{inspect(view)} " <>
      "because it is not mounted nor accessed through the router live/3 macro"
end
def live_link_info!(router, view, uri) do
  %URI{host: host, path: path, query: query} = parsed_uri = URI.parse(uri)
  query_params = if query, do: Plug.Conn.Query.decode(query), else: %{}
  # Match against the pinned view so a URI that routes to a *different*
  # LiveView is reported as :external rather than :internal.
  case Phoenix.Router.route_info(router, "GET", path || "", host) do
    %{plug: Phoenix.LiveView.Plug, plug_opts: ^view, path_params: path_params} ->
      {:internal, Map.merge(query_params, path_params), parsed_uri}
    %{} ->
      :external
    :error ->
      raise ArgumentError,
        "cannot invoke handle_params nor live_redirect/live_link to #{inspect(uri)} " <>
          "because it isn't defined in #{inspect(router)}"
  end
end
@doc """
Raises the error used when a live patch/redirect is combined with a stop.

A plain `redirect/2` must be used when stopping.
"""
def raise_bad_stop_and_live_redirect!() do
  raise RuntimeError, """
  attempted to live patch/redirect while stopping.
  a LiveView cannot be stopped while issuing a live patch/redirect to the client. \
  Use redirect/2 instead if you wish to stop and redirect.
  """
end
@doc """
Raises the error used when a socket is stopped without a redirect.

Stopping always requires an accompanying `redirect/2`.
"""
def raise_bad_stop_and_no_redirect!() do
  raise RuntimeError, """
  attempted to stop socket without redirecting.
  you must always redirect when stopping a socket, see redirect/2.
  """
end
@doc """
Invokes `view.mount/N` with `args` when that callback is exported,
validating the return value and applying any returned mount options
(`:temporary_assigns`, `:layout`); otherwise returns the socket unchanged.

Raises if the callback returns something other than `{:ok, socket}` or
`{:ok, socket, opts}`, or if it attempted to redirect during mount.
"""
def maybe_call_mount!(socket, view, args) do
  arity = length(args)
  if function_exported?(view, :mount, arity) do
    socket =
      case apply(view, :mount, args) do
        {:ok, %Socket{} = socket, opts} when is_list(opts) ->
          # Each mount option is validated/applied by mount_opt/4 in turn.
          Enum.reduce(opts, socket, fn {key, val}, acc -> mount_opt(acc, key, val, arity) end)
        {:ok, %Socket{} = socket} ->
          socket
        other ->
          raise ArgumentError, """
          invalid result returned from #{inspect(view)}.mount/#{length(args)}.
          Expected {:ok, socket} | {:ok, socket, opts}, got: #{inspect(other)}
          """
      end
    # Redirecting in mount is never allowed.
    if socket.redirected do
      raise "cannot redirect socket on mount/#{length(args)}"
    end
    socket
  else
    socket
  end
end
@doc """
Invokes the component's `update/2` callback with `assigns` when it is
exported, validating the return value; otherwise merges the assigns into
the socket directly.

Raises if the callback returns something other than `{:ok, socket}`, or if
it attempted to redirect.
"""
def maybe_call_update!(socket, component, assigns) do
  if function_exported?(component, :update, 2) do
    socket =
      case component.update(assigns, socket) do
        {:ok, %Socket{} = socket} ->
          socket
        other ->
          raise ArgumentError, """
          invalid result returned from #{inspect(component)}.update/2.
          Expected {:ok, socket}, got: #{inspect(other)}
          """
      end
    # Redirecting from a component update is never allowed.
    if socket.redirected do
      raise "cannot redirect socket on update/2"
    end
    socket
  else
    LiveView.assign(socket, assigns)
  end
end
# 12 pseudo-random bytes — current time, a hash of {node, pid}, and a unique
# integer — encoded with URL-safe base64 (always 16 characters).
defp random_encoded_bytes do
  time = System.system_time(:nanosecond)
  node_hash = :erlang.phash2({node(), self()})
  unique = :erlang.unique_integer()
  Base.url_encode64(<<time::64, node_hash::16, unique::16>>)
end
# Validates a single option returned from mount/N; only keys listed in
# @mount_opts are accepted, anything else raises with the offending pair.
defp mount_opt(%Socket{} = socket, key, val, _arity) when key in @mount_opts do
  do_mount_opt(socket, key, val)
end
defp mount_opt(%Socket{view: view}, key, val, arity) do
  raise ArgumentError, """
  invalid option returned from #{inspect(view)}.mount/#{arity}.
  Expected keys to be one of #{inspect(@mount_opts)}
  got: #{inspect(key)}: #{inspect(val)}
  """
end
# Stores the {module, template} layout override in the socket's private
# map; read back by layout/2 during rendering.
defp do_mount_opt(socket, :layout, {mod, template}) when is_atom(mod) and is_binary(template) do
  %Socket{socket | private: Map.put(socket.private, :layout, {mod, template})}
end
defp do_mount_opt(_socket, :layout, bad_layout) do
  raise ArgumentError,
    "the :layout mount option expects a tuple of the form {MyLayoutView, \"my_template.html\"}, " <>
      "got: #{inspect(bad_layout)}"
end
# Validates and installs temporary assigns: they seed the assigns now
# (existing assigns win in the merge) and are stored privately so
# clear_changed/1 can re-apply them after every render.
defp do_mount_opt(socket, :temporary_assigns, temp_assigns) do
  unless Keyword.keyword?(temp_assigns) do
    raise "the :temporary_assigns mount option must be keyword list"
  end
  temp_assigns = Map.new(temp_assigns)
  %Socket{
    socket
    | assigns: Map.merge(temp_assigns, socket.assigns),
      private: Map.put(socket.private, :temporary_assigns, temp_assigns)
  }
end
# Removes the given keys from the socket's private map.
defp drop_private(%Socket{} = socket, keys) do
  %Socket{socket | private: Map.drop(socket.private, keys)}
end
# Renders through the configured layout when one exists; otherwise calls
# the view's render/1 directly.
defp render_view(socket, view, assigns) do
  case layout(socket, view) do
    nil -> view.render(assigns)
    {mod, template} -> mod.render(template, assigns)
  end
end
# The layout set at mount time (socket private) wins over the view's
# compile-time layout configuration.
defp layout(socket, view) do
  configured = socket.private[:layout]
  configured || view.__live__()[:layout]
end
end
| 28.108844 | 106 | 0.636133 |
ff0d3722812d7efd75ac0f33a18326e76efeedf0 | 753 | ex | Elixir | api/web/models/category.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | api/web/models/category.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | api/web/models/category.ex | AlexYanai/microblogger | 833320759cddd276bc31dabaec6f0c9e2eabb05a | [
"MIT"
] | null | null | null | defmodule Microblogger.Category do
use Microblogger.Web, :model
alias Microblogger.{Category, PostCategory, Repo}
# Ecto schema for the "categories" table: a name/description pair, joined
# many-to-many with posts through the "post_categories" join table.
schema "categories" do
  field :name, :string
  field :description, :string
  many_to_many :posts, Microblogger.Post, join_through: "post_categories"
  timestamps()
end
@doc """
Fetches the category with the given name, or nil when none exists.
"""
def find_by_name(name) do
  query = from(c in Category, where: c.name == ^name, select: c)
  Repo.one(query)
end
@doc """
Fetches the join record linking the given post id and category id, or nil
when the pair is not linked.
"""
def find_by_cc(post_id, category_id) do
  query =
    from(pc in PostCategory,
      where: pc.post_id == ^post_id and pc.category_id == ^category_id,
      select: pc
    )

  Repo.one(query)
end
@doc """
Builds a changeset casting and requiring both `:name` and `:description`.
"""
def changeset(struct, params \\ %{}) do
  permitted = [:name, :description]

  struct
  |> cast(params, permitted)
  |> validate_required(permitted)
end
end
| 20.351351 | 75 | 0.613546 |
ff0d3efacfd0fc8ee513bff18f1263dc867c9fe5 | 1,563 | ex | Elixir | Server/Megauni/specs/funcs.ex | da99/megauni.alpha | 65e402cc139e45f043e67a9384caf77d1fcbe5b7 | [
"MIT"
] | null | null | null | Server/Megauni/specs/funcs.ex | da99/megauni.alpha | 65e402cc139e45f043e67a9384caf77d1fcbe5b7 | [
"MIT"
] | null | null | null | Server/Megauni/specs/funcs.ex | da99/megauni.alpha | 65e402cc139e45f043e67a9384caf77d1fcbe5b7 | [
"MIT"
] | null | null | null |
defmodule Megauni.Spec_Funcs do
def query(stack, prog, env) do
{query, prog, env} = JSON_Applet.take(prog, 1, env)
{:ok, %{columns: keys, rows: rows}} = Ecto.Adapters.SQL.query( Megauni.Repos.Main, query, [] )
rows = Enum.map rows, fn(r) ->
Enum.reduce Enum.zip(keys,r), %{}, fn({key, val}, map) ->
Map.put map, key, val
end
end
{stack ++ [rows], prog, env}
end
def type_ids(stack, prog, env) do
ids = Regex.scan(
~r/RETURN\s+(\d+)\s?;/,
File.read!("lib/Megauni/migrates/__-02-link_type_id.sql")
)
|> Enum.map(fn([_match, id]) ->
id
end)
{stack ++ [ids], prog, env}
end
def lookup_kv(k, env) do
cond do
x = Regex.run(~r/^card_(.)?\.linked_at$/, k) ->
id = env["card_#{List.last(x)}"]["id"]
row = Megauni.Model.query(
"""
SELECT created_at AS linked_at FROM link
WHERE type_id = name_to_type_id('LINK')
AND a_id = $1 AND a_type_id = name_to_type_id('CARD')
AND b_type_id = name_to_type_id('SCREEN_NAME')
ORDER BY id DESC
LIMIT 1
""", [id]
) |> Megauni.Model.one_row;
{row["linked_at"], env}
x = Regex.run(~r/^sn_(.)?\.id$/, k) ->
name = env["sn_#{List.last(x)}"]["screen_name"]
{Screen_Name.read_id!(name), env}
k == "sn.id" ->
name = env["sn"]["screen_name"]
{Screen_Name.read_id!(name), env}
true ->
{k, env}
end # === cond
end
end # === defmodule Megauni.Spec_Funcs
| 26.948276 | 98 | 0.528471 |
ff0d52cadbd05c094f9d93b0c3ea865e9d5ecffd | 102,619 | ex | Elixir | lib/aws/generated/kms.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/kms.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/kms.ex | salemove/aws-elixir | debdf6482158a71a57636ac664c911e682093395 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.KMS do
@moduledoc """
AWS Key Management Service
AWS Key Management Service (AWS KMS) is an encryption and key management web
service.
This guide describes the AWS KMS operations that you can call programmatically.
For general information about AWS KMS, see the [ *AWS Key Management Service Developer Guide* ](https://docs.aws.amazon.com/kms/latest/developerguide/).
AWS provides SDKs that consist of libraries and sample code for various
programming languages and platforms (Java, Ruby, .Net, macOS, Android, etc.).
The SDKs provide a convenient way to create programmatic access to AWS KMS and
other AWS services. For example, the SDKs take care of tasks such as signing
requests (see below), managing errors, and retrying requests automatically. For
more information about the AWS SDKs, including how to download and install them,
see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
We recommend that you use the AWS SDKs to make programmatic API calls to AWS
KMS.
Clients must support TLS (Transport Layer Security) 1.0. We recommend TLS 1.2.
Clients must also support cipher suites with Perfect Forward Secrecy (PFS) such
as Ephemeral Diffie-Hellman (DHE) or Elliptic Curve Ephemeral Diffie-Hellman
(ECDHE). Most modern systems such as Java 7 and later support these modes.
## Signing Requests
Requests must be signed by using an access key ID and a secret access key. We
strongly recommend that you *do not* use your AWS account (root) access key ID
and secret key for everyday work with AWS KMS. Instead, use the access key ID
and secret access key for an IAM user. You can also use the AWS Security Token
Service to generate temporary security credentials that you can use to sign
requests.
All AWS KMS operations require [Signature Version
4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
## Logging API Requests
AWS KMS supports AWS CloudTrail, a service that logs AWS API calls and related
events for your AWS account and delivers them to an Amazon S3 bucket that you
specify. By using the information collected by CloudTrail, you can determine
what requests were made to AWS KMS, who made the request, when it was made, and
so on. To learn more about CloudTrail, including how to turn it on and find your
log files, see the [AWS CloudTrail User Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/).
## Additional Resources
For more information about credentials and request signing, see the following:
* [AWS Security Credentials](https://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)
- This topic provides general information about the types of credentials used
for accessing AWS.
* [Temporary Security Credentials](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html)
- This section of the *IAM User Guide* describes how to create and use temporary
security credentials.
* [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
- This set of topics walks you through the process of signing a request using an
access key ID and a secret access key.
## Commonly Used API Operations
Of the API operations discussed in this guide, the following will prove the most
useful for most applications. You will likely perform operations other than
these, such as creating keys and assigning policies, by using the console.
* `Encrypt`
* `Decrypt`
* `GenerateDataKey`
* `GenerateDataKeyWithoutPlaintext`
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor shared by every request in this module: the
# "kms" endpoint/signing config and the "TrentService" X-Amz-Target prefix
# for the JSON 1.1 protocol. Auto-generated — do not edit by hand.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "KMS",
    api_version: "2014-11-01",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "kms",
    global?: false,
    protocol: "json",
    service_id: "KMS",
    signature_version: "v4",
    signing_name: "kms",
    target_prefix: "TrentService"
  }
end
@doc """
Cancels the deletion of a customer master key (CMK).
When this operation succeeds, the key state of the CMK is `Disabled`. To enable
the CMK, use `EnableKey`.
For more information about scheduling and canceling deletion of a CMK, see
[Deleting Customer Master Keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:CancelKeyDeletion](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `ScheduleKeyDeletion`
"""
# Generated wrapper: POSTs `input` as a "CancelKeyDeletion" JSON request
# (target "TrentService.CancelKeyDeletion") using the shared metadata.
def cancel_key_deletion(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CancelKeyDeletion", input, options)
end
@doc """
Connects or reconnects a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
to its associated AWS CloudHSM cluster.
The custom key store must be connected before you can create customer master
keys (CMKs) in the key store or use the CMKs it contains. You can disconnect and
reconnect a custom key store at any time.
To connect a custom key store, its associated AWS CloudHSM cluster must have at
least one active HSM. To get the number of active HSMs in a cluster, use the
[DescribeClusters](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_DescribeClusters.html) operation. To add HSMs to the cluster, use the
[CreateHsm](https://docs.aws.amazon.com/cloudhsm/latest/APIReference/API_CreateHsm.html)
operation. Also, the [ `kmsuser` crypto user](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
(CU) must not be logged into the cluster. This prevents AWS KMS from using this
account to log in.
The connection process can take an extended amount of time to complete; up to 20
minutes. This operation starts the connection process, but it does not wait for
it to complete. When it succeeds, this operation quickly returns an HTTP 200
response and a JSON object with no properties. However, this response does not
indicate that the custom key store is connected. To get the connection state of
the custom key store, use the `DescribeCustomKeyStores` operation.
During the connection process, AWS KMS finds the AWS CloudHSM cluster that is
associated with the custom key store, creates the connection infrastructure,
connects to the cluster, logs into the AWS CloudHSM client as the `kmsuser` CU,
and rotates its password.
The `ConnectCustomKeyStore` operation might fail for various reasons. To find
the reason, use the `DescribeCustomKeyStores` operation and see the
`ConnectionErrorCode` in the response. For help interpreting the
`ConnectionErrorCode`, see `CustomKeyStoresListEntry`.
To fix the failure, use the `DisconnectCustomKeyStore` operation to disconnect
the custom key store, correct the error, use the `UpdateCustomKeyStore`
operation if necessary, and then use `ConnectCustomKeyStore` again.
If you are having trouble connecting or disconnecting a custom key store, see
[Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:ConnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
# Generated wrapper: POSTs `input` as a "ConnectCustomKeyStore" JSON
# request using the shared metadata.
def connect_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ConnectCustomKeyStore", input, options)
end
@doc """
Creates a friendly name for a customer master key (CMK).
You can use an alias to identify a CMK in the AWS KMS console, in the
`DescribeKey` operation and in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations),
such as `Encrypt` and `GenerateDataKey`.
You can also change the CMK that's associated with the alias (`UpdateAlias`) or
delete the alias (`DeleteAlias`) at any time. These operations don't affect the
underlying CMK.
You can associate the alias with any customer managed CMK in the same AWS
Region. Each alias is associated with only one CMK at a time, but a CMK can have
multiple aliases. A valid CMK is required. You can't create an alias without a
CMK.
The alias must be unique in the account and Region, but you can have aliases
with the same name in different Regions. For detailed information about aliases,
see [Using aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html)
in the *AWS Key Management Service Developer Guide*.
This operation does not return a response. To get the alias that you created,
use the `ListAliases` operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on an alias in a
different AWS account.
## Required permissions
*
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
*
[kms:CreateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the CMK (key policy).
For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `DeleteAlias`
* `ListAliases`
* `UpdateAlias`
"""
# Generated wrapper: POSTs `input` as a "CreateAlias" JSON request using
# the shared metadata.
def create_alias(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateAlias", input, options)
end
@doc """
Creates a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
that is associated with an [AWS CloudHSM cluster](https://docs.aws.amazon.com/cloudhsm/latest/userguide/clusters.html)
that you own and manage.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
feature in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
Before you create the custom key store, you must assemble the required elements,
including an AWS CloudHSM cluster that fulfills the requirements for a custom
key store. For details about the required elements, see [Assemble the Prerequisites](https://docs.aws.amazon.com/kms/latest/developerguide/create-keystore.html#before-keystore)
in the *AWS Key Management Service Developer Guide*.
When the operation completes successfully, it returns the ID of the new custom
key store. Before you can use your new custom key store, you need to use the
`ConnectCustomKeyStore` operation to connect the new key store to its AWS
CloudHSM cluster. Even if you are not going to use your custom key store
immediately, you might want to connect it to verify that all settings are
correct and then disconnect it until you are ready to use it.
For help with failures, see [Troubleshooting a Custom Key Store](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:CreateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy).
## Related operations:
* `ConnectCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
* `UpdateCustomKeyStore`
"""
# Generated wrapper: POSTs `input` as a "CreateCustomKeyStore" JSON request
# using the shared metadata.
def create_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateCustomKeyStore", input, options)
end
@doc """
Adds a grant to a customer master key (CMK).
The grant allows the grantee principal to use the CMK when the conditions
specified in the grant are met. When setting permissions, grants are an
alternative to key policies.
To create a grant that allows a [cryptographic operation](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations)
only when the request includes a particular [encryption context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context),
use the `Constraints` parameter. For details, see `GrantConstraints`.
You can create grants on symmetric and asymmetric CMKs. However, if the grant
allows an operation that the CMK does not support, `CreateGrant` fails with a
`ValidationException`.
* Grants for symmetric CMKs cannot allow operations that are not
supported for symmetric CMKs, including `Sign`, `Verify`, and `GetPublicKey`.
(There are limited exceptions to this rule for legacy operations, but you should
not create a grant for an operation that AWS KMS does not support.)
* Grants for asymmetric CMKs cannot allow operations that are not
supported for asymmetric CMKs, including operations that [generate data keys](https://docs.aws.amazon.com/kms/latest/APIReference/API_GenerateDataKey)
or [data key pairs](https://docs.aws.amazon.com/kms/latest/APIReference/API_GenerateDataKeyPair),
or operations related to [automatic key rotation](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html),
[imported key material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html),
or CMKs in [custom key stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
* Grants for asymmetric CMKs with a `KeyUsage` of `ENCRYPT_DECRYPT`
cannot allow the `Sign` or `Verify` operations. Grants for asymmetric CMKs with
a `KeyUsage` of `SIGN_VERIFY` cannot allow the `Encrypt` or `Decrypt`
operations.
* Grants for asymmetric CMKs cannot include an encryption context
grant constraint. An encryption context is not supported on asymmetric CMKs.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*. For more information about
grants, see
[Grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in the * *AWS Key Management Service Developer Guide* *.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master
Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:CreateGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `ListGrants`
* `ListRetirableGrants`
* `RetireGrant`
* `RevokeGrant`
"""
# Generated wrapper: POSTs `input` as a "CreateGrant" JSON request using
# the shared metadata.
def create_grant(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateGrant", input, options)
end
@doc """
Creates a unique customer managed [customer master key](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master-keys)
(CMK) in your AWS account and Region.
You can use the `CreateKey` operation to create symmetric or asymmetric CMKs.
* **Symmetric CMKs** contain a 256-bit symmetric key that never
leaves AWS KMS unencrypted. To use the CMK, you must call AWS KMS. You can use a
symmetric CMK to encrypt and decrypt small amounts of data, but they are
typically used to generate [data keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys)
and [data keys pairs](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-key-pairs).
For details, see `GenerateDataKey` and `GenerateDataKeyPair`.
* **Asymmetric CMKs** can contain an RSA key pair or an Elliptic
Curve (ECC) key pair. The private key in an asymmetric CMK never leaves AWS KMS
unencrypted. However, you can use the `GetPublicKey` operation to download the
public key so it can be used outside of AWS KMS. CMKs with RSA key pairs can be
used to encrypt or decrypt data or sign and verify messages (but not both). CMKs
with ECC key pairs can be used only to sign and verify messages.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
To create different types of CMKs, use the following guidance:
## Definitions
### Asymmetric CMKs
To create an asymmetric CMK, use the `CustomerMasterKeySpec` parameter to
specify the type of key material in the CMK. Then, use the `KeyUsage` parameter
to determine whether the CMK will be used to encrypt and decrypt or sign and
verify. You can't change these properties after the CMK is created.
### Symmetric CMKs
When creating a symmetric CMK, you don't need to specify the
`CustomerMasterKeySpec` or `KeyUsage` parameters. The default value for
`CustomerMasterKeySpec`, `SYMMETRIC_DEFAULT`, and the default value for
`KeyUsage`, `ENCRYPT_DECRYPT`, are the only valid values for symmetric CMKs.
### Imported Key Material
To import your own key material, begin by creating a symmetric CMK with no key
material. To do this, use the `Origin` parameter of `CreateKey` with a value of
`EXTERNAL`. Next, use `GetParametersForImport` operation to get a public key and
import token, and use the public key to encrypt your key material. Then, use
`ImportKeyMaterial` with your import token to import the key material. For
step-by-step instructions, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the * *AWS Key Management Service Developer Guide* *. You cannot import the
key material into an asymmetric CMK.
### Custom Key Stores
To create a symmetric CMK in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
use the `CustomKeyStoreId` parameter to specify the custom key store. You must
also use the `Origin` parameter with a value of `AWS_CLOUDHSM`. The AWS CloudHSM
cluster that is associated with the custom key store must have at least two
active HSMs in different Availability Zones in the AWS Region.
You cannot create an asymmetric CMK in a custom key store. For information about
custom key stores in AWS KMS see [Using Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the * *AWS Key Management Service Developer Guide* *.
**Cross-account use**: No. You cannot use this operation to create a CMK in a
different AWS account.
**Required permissions**:
[kms:CreateKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy). To use the `Tags` parameter,
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy). For examples and information about related permissions, see [Allow a user to create
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policy-example-create-key)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `DescribeKey`
* `ListKeys`
* `ScheduleKeyDeletion`
"""
# Generated wrapper: POSTs `input` as a "CreateKey" JSON request using the
# shared metadata.
def create_key(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "CreateKey", input, options)
end
@doc """
Decrypts ciphertext that was encrypted by an AWS KMS customer master key (CMK)
using any of the following operations:

  * `Encrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPair`
  * `GenerateDataKeyWithoutPlaintext`
  * `GenerateDataKeyPairWithoutPlaintext`

You can use this operation to decrypt ciphertext that was encrypted under a
symmetric or asymmetric CMK. When the CMK is asymmetric, you must specify the
CMK and the encryption algorithm that was used to encrypt the ciphertext. For
information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.

The Decrypt operation also decrypts ciphertext that was encrypted outside of AWS
KMS by the public key in an AWS KMS asymmetric CMK. However, it cannot decrypt
ciphertext produced by other libraries, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with AWS KMS.

If the ciphertext was encrypted under a symmetric CMK, the `KeyId` parameter is
optional. AWS KMS can get this information from metadata that it adds to the
symmetric ciphertext blob. This feature adds durability to your implementation
by ensuring that authorized users can decrypt ciphertext decades after it was
encrypted, even if they've lost track of the CMK ID. However, specifying the CMK
is always recommended as a best practice. When you use the `KeyId` parameter to
specify a CMK, AWS KMS only uses the CMK you specify. If the ciphertext was
encrypted under a different CMK, the `Decrypt` operation fails. This practice
ensures that you use the CMK that you intend.

Whenever possible, use key policies to give users permission to call the
`Decrypt` operation on a particular CMK, instead of using IAM policies.
Otherwise, you might create an IAM user policy that gives the user `Decrypt`
permission on all CMKs. This user could decrypt ciphertext that was encrypted by
CMKs in other accounts if the key policy for the cross-account CMK permits it.
If you must use an IAM policy for `Decrypt` permissions, limit the user to
particular CMKs or particular trusted accounts. For details, see [Best practices for IAM
policies](https://docs.aws.amazon.com/kms/latest/developerguide/iam-policies.html#iam-policies-best-practices)
in the *AWS Key Management Service Developer Guide*.

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.

**Cross-account use**: Yes. You can decrypt a ciphertext using a CMK in a
different AWS account.

**Required permissions**:
[kms:Decrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `Encrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPair`
  * `ReEncrypt`
"""
def decrypt(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "Decrypt", input, options)
end
@doc """
Deletes the specified alias.

An alias is not a property of a customer master key (CMK): deleting or
changing an alias does not affect its CMK, and aliases do not appear in the
response from the `DescribeKey` operation. Use `ListAliases` to get the
aliases of all CMKs.

Each CMK can have multiple aliases. To change the alias of a CMK, use
`DeleteAlias` to delete the current alias and `CreateAlias` to create a new
alias. To associate an existing alias with a different customer master key
(CMK), call `UpdateAlias`.

**Cross-account use**: No. You cannot perform this operation on an alias in a
different AWS account.

## Required permissions

  * [kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
  * [kms:DeleteAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the CMK (key policy).

For details, see [Controlling access to aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.

## Related operations:

  * `CreateAlias`
  * `ListAliases`
  * `UpdateAlias`
"""
def delete_alias(%Client{} = client, input, options \\ []) do
  # Thin wrapper: forwards the caller's input to the DeleteAlias API action.
  meta = metadata()
  Request.request_post(client, meta, "DeleteAlias", input, options)
end
@doc """
Deletes a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).

This operation does not delete the AWS CloudHSM cluster that is associated with
the custom key store, or affect any users or keys in the cluster.

The custom key store that you delete cannot contain any AWS KMS [customer master keys
(CMKs)](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys).
Before deleting the key store, verify that you will never need to use any of the
CMKs in the key store for any [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
Then, use `ScheduleKeyDeletion` to delete the AWS KMS customer master keys
(CMKs) from the key store. When the scheduled waiting period expires, the
`ScheduleKeyDeletion` operation deletes the CMKs. Then it makes a best effort to
delete the key material from the associated cluster. However, you might need to
manually [delete the orphaned key material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.

After all CMKs are deleted from AWS KMS, use `DisconnectCustomKeyStore` to
disconnect the key store from AWS KMS. Then, you can delete the custom key
store.

Instead of deleting the custom key store, consider using
`DisconnectCustomKeyStore` to disconnect it from AWS KMS. While the key store is
disconnected, you cannot create or use the CMKs in the key store. But, you do
not need to delete CMKs and you can reconnect a disconnected custom key store at
any time.

If the operation succeeds, it returns a JSON object with no properties.

This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.

**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.

**Required permissions**:
[kms:DeleteCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)

## Related operations:

  * `ConnectCustomKeyStore`
  * `CreateCustomKeyStore`
  * `DescribeCustomKeyStores`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def delete_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteCustomKeyStore", input, options)
end
@doc """
Deletes key material that you previously imported, which makes the specified
customer master key (CMK) unusable.

For more information about importing key material into AWS KMS, see
[Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.

When the specified CMK is in the `PendingDeletion` state, this operation does
not change the CMK's state. Otherwise, it changes the CMK's state to
`PendingImport`. After you delete key material, you can use
`ImportKeyMaterial` to reimport the same key material into the CMK.

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.

**Required permissions**:
[kms:DeleteImportedKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `GetParametersForImport`
  * `ImportKeyMaterial`
"""
def delete_imported_key_material(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the DeleteImportedKeyMaterial API action.
  meta = metadata()
  Request.request_post(client, meta, "DeleteImportedKeyMaterial", input, options)
end
@doc """
Gets information about [custom key stores](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in the account and region.

This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.

By default, this operation returns information about all custom key stores in
the account and region. To get only information about a particular custom key
store, use either the `CustomKeyStoreName` or `CustomKeyStoreId` parameter (but
not both).

To determine whether the custom key store is connected to its AWS CloudHSM
cluster, use the `ConnectionState` element in the response. If an attempt to
connect the custom key store failed, the `ConnectionState` value is `FAILED` and
the `ConnectionErrorCode` element in the response indicates the cause of the
failure. For help interpreting the `ConnectionErrorCode`, see
`CustomKeyStoresListEntry`.

Custom key stores have a `DISCONNECTED` connection state if the key store has
never been connected or you use the `DisconnectCustomKeyStore` operation to
disconnect it. If your custom key store state is `CONNECTED` but you are having
trouble using it, make sure that its associated AWS CloudHSM cluster is active
and contains the minimum number of HSMs required for the operation, if any.

For help repairing your custom key store, see the [Troubleshooting Custom Key Stores](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html)
topic in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.

**Required permissions**:
[kms:DescribeCustomKeyStores](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)

## Related operations:

  * `ConnectCustomKeyStore`
  * `CreateCustomKeyStore`
  * `DeleteCustomKeyStore`
  * `DisconnectCustomKeyStore`
  * `UpdateCustomKeyStore`
"""
def describe_custom_key_stores(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DescribeCustomKeyStores", input, options)
end
@doc """
Provides detailed information about a customer master key (CMK).

You can run `DescribeKey` on a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk)
or an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk).
The detailed information includes the key ARN, creation date (and deletion
date, if applicable), the key state, and the origin and expiration date (if
any) of the key material. For CMKs in custom key stores, it includes
information about the custom key store, such as the key store ID and the AWS
CloudHSM cluster ID. It includes fields, like `KeySpec`, that help you
distinguish symmetric from asymmetric CMKs, as well as the key usage
(encryption or signing) and the encryption algorithms or signing algorithms
that the CMK supports.

`DescribeKey` does not return the following information:

  * Aliases associated with the CMK. To get this information, use
    `ListAliases`.
  * Whether automatic key rotation is enabled on the CMK. To get this
    information, use `GetKeyRotationStatus`. Also, some key states prevent a
    CMK from being automatically rotated. For details, see [How Automatic Key Rotation Works](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-how-it-works)
    in *AWS Key Management Service Developer Guide*.
  * Tags on the CMK. To get this information, use `ListResourceTags`.
  * Key policies and grants on the CMK. To get this information, use
    `GetKeyPolicy` and `ListGrants`.

If you call the `DescribeKey` operation on a *predefined AWS alias*, that is,
an AWS alias with no key ID, AWS KMS creates an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#master_keys).
Then, it associates the alias with the new CMK, and returns the `KeyId` and
`Arn` of the new CMK in the response.

**Cross-account use**: Yes. To perform this operation with a CMK in a
different AWS account, specify the key ARN or alias ARN in the value of the
`KeyId` parameter.

**Required permissions**:
[kms:DescribeKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `GetKeyPolicy`
  * `GetKeyRotationStatus`
  * `ListAliases`
  * `ListGrants`
  * `ListKeys`
  * `ListResourceTags`
  * `ListRetirableGrants`
"""
def describe_key(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the DescribeKey API action.
  client
  |> Request.request_post(metadata(), "DescribeKey", input, options)
end
@doc """
Sets the state of a customer master key (CMK) to disabled.

This change temporarily prevents use of the CMK for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).
For more information about how key state affects the use of a CMK, see [How Key State Affects the Use of a Customer Master
Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.

**Required permissions**:
[kms:DisableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

**Related operations**: `EnableKey`
"""
def disable_key(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the DisableKey API action.
  meta = metadata()
  Request.request_post(client, meta, "DisableKey", input, options)
end
@doc """
Disables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric customer master key (CMK).

You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported
key material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.

**Required permissions**:
[kms:DisableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `EnableKeyRotation`
  * `GetKeyRotationStatus`
"""
def disable_key_rotation(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the DisableKeyRotation API action.
  client
  |> Request.request_post(metadata(), "DisableKeyRotation", input, options)
end
@doc """
Disconnects the [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
from its associated AWS CloudHSM cluster.

While a custom key store is disconnected, you can manage the custom key store
and its customer master keys (CMKs), but you cannot create or use CMKs in the
custom key store. You can reconnect the custom key store at any time.

While a custom key store is disconnected, all attempts to create customer master
keys (CMKs) in the custom key store or to use existing CMKs in [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations)
will fail. This action can prevent users from storing and accessing sensitive
data.

To find the connection state of a custom key store, use the
`DescribeCustomKeyStores` operation. To reconnect a custom key store, use the
`ConnectCustomKeyStore` operation.

If the operation succeeds, it returns a JSON object with no properties.

This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.

**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.

**Required permissions**:
[kms:DisconnectCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)

## Related operations:

  * `ConnectCustomKeyStore`
  * `CreateCustomKeyStore`
  * `DeleteCustomKeyStore`
  * `DescribeCustomKeyStores`
  * `UpdateCustomKeyStore`
"""
def disconnect_custom_key_store(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DisconnectCustomKeyStore", input, options)
end
@doc """
Sets the key state of a customer master key (CMK) to enabled.

This allows you to use the CMK for [cryptographic operations](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#cryptographic-operations).

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.

**Required permissions**:
[kms:EnableKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

**Related operations**: `DisableKey`
"""
def enable_key(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the EnableKey API action.
  meta = metadata()
  Request.request_post(client, meta, "EnableKey", input, options)
end
@doc """
Enables [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
for the specified symmetric customer master key (CMK).

You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported
key material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.

**Required permissions**:
[kms:EnableKeyRotation](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `DisableKeyRotation`
  * `GetKeyRotationStatus`
"""
def enable_key_rotation(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the EnableKeyRotation API action.
  client
  |> Request.request_post(metadata(), "EnableKeyRotation", input, options)
end
@doc """
Encrypts plaintext into ciphertext by using a customer master key (CMK).

The `Encrypt` operation has two primary use cases:

  * You can encrypt small amounts of arbitrary data, such as a
    personal identifier or database password, or other sensitive information.
  * You can use the `Encrypt` operation to move encrypted data from
    one AWS Region to another. For example, in Region A, generate a data key and
    use the plaintext key to encrypt your data. Then, in Region A, use the
    `Encrypt` operation to encrypt the plaintext data key under a CMK in Region
    B. Now, you can move the encrypted data and the encrypted data key to Region
    B. When necessary, you can decrypt the encrypted data key and the encrypted
    data entirely within Region B.

You don't need to use the `Encrypt` operation to encrypt a data key. The
`GenerateDataKey` and `GenerateDataKeyPair` operations return a plaintext data
key and an encrypted copy of that data key.

When you encrypt data, you must specify a symmetric or asymmetric CMK to use in
the encryption operation. The CMK must have a `KeyUsage` value of
`ENCRYPT_DECRYPT.` To find the `KeyUsage` of a CMK, use the `DescribeKey`
operation.

If you use a symmetric CMK, you can use an encryption context to add additional
security to your encryption operation. If you specify an `EncryptionContext`
when encrypting data, you must specify the same encryption context (a
case-sensitive exact match) when decrypting the data. Otherwise, the request to
decrypt fails with an `InvalidCiphertextException`. For more information, see
[Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.

If you specify an asymmetric CMK, you must also specify the encryption
algorithm. The algorithm must be compatible with the CMK type.

When you use an asymmetric CMK to encrypt or reencrypt data, be sure to record
the CMK and encryption algorithm that you choose. You will be required to
provide the same CMK and encryption algorithm when you decrypt the data. If the
CMK and algorithm do not match the values used to encrypt the data, the decrypt
operation fails.

You are not required to supply the CMK ID and encryption algorithm when you
decrypt with symmetric CMKs because AWS KMS stores this information in the
ciphertext blob. AWS KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.

The maximum size of the data that you can encrypt varies with the type of CMK
and the encryption algorithm that you choose.

  * Symmetric CMKs
    * `SYMMETRIC_DEFAULT`: 4096 bytes
  * `RSA_2048`
    * `RSAES_OAEP_SHA_1`: 214 bytes
    * `RSAES_OAEP_SHA_256`: 190 bytes
  * `RSA_3072`
    * `RSAES_OAEP_SHA_1`: 342 bytes
    * `RSAES_OAEP_SHA_256`: 318 bytes
  * `RSA_4096`
    * `RSAES_OAEP_SHA_1`: 470 bytes
    * `RSAES_OAEP_SHA_256`: 446 bytes

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.

**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.

**Required permissions**:
[kms:Encrypt](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `Decrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPair`
"""
def encrypt(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "Encrypt", input, options)
end
@doc """
Generates a unique symmetric data key for client-side encryption.

This operation returns a plaintext copy of the data key and a copy that is
encrypted under a customer master key (CMK) that you specify. You can use the
plaintext key to encrypt your data outside of AWS KMS and store the encrypted
data key with the encrypted data.

`GenerateDataKey` returns a unique data key for each request. The bytes in the
plaintext key are not related to the caller or the CMK.

To generate a data key, specify the symmetric CMK that will be used to encrypt
the data key. You cannot use an asymmetric CMK to generate data keys. To get
the type of your CMK, use the `DescribeKey` operation. You must also specify
the length of the data key. Use either the `KeySpec` or `NumberOfBytes`
parameters (but not both). For 128-bit and 256-bit data keys, use the
`KeySpec` parameter.

To get only an encrypted copy of the data key, use
`GenerateDataKeyWithoutPlaintext`. To generate an asymmetric data key pair,
use the `GenerateDataKeyPair` or `GenerateDataKeyPairWithoutPlaintext`
operation. To get a cryptographically secure random byte string, use
`GenerateRandom`.

You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

## How to use your data key

We recommend that you use the following pattern to encrypt data locally in
your application. You can write your own code or use a client-side encryption
library, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/), the
[Amazon DynamoDB Encryption Client](https://docs.aws.amazon.com/dynamodb-encryption-client/latest/devguide/),
or [Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html)
to do these tasks for you.

To encrypt data outside of AWS KMS:

  1. Use the `GenerateDataKey` operation to get a data key.
  2. Use the plaintext data key (in the `Plaintext` field of the
     response) to encrypt your data outside of AWS KMS. Then erase the
     plaintext data key from memory.
  3. Store the encrypted data key (in the `CiphertextBlob` field of
     the response) with the encrypted data.

To decrypt data outside of AWS KMS:

  1. Use the `Decrypt` operation to decrypt the encrypted data key.
     The operation returns a plaintext copy of the data key.
  2. Use the plaintext data key to decrypt data outside of AWS KMS,
     then erase the plaintext data key from memory.

**Cross-account use**: Yes. To perform this operation with a CMK in a
different AWS account, specify the key ARN or alias ARN in the value of the
`KeyId` parameter.

**Required permissions**:
[kms:GenerateDataKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `Decrypt`
  * `Encrypt`
  * `GenerateDataKeyPair`
  * `GenerateDataKeyPairWithoutPlaintext`
  * `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the GenerateDataKey API action.
  meta = metadata()
  Request.request_post(client, meta, "GenerateDataKey", input, options)
end
@doc """
Generates a unique asymmetric data key pair.

The `GenerateDataKeyPair` operation returns a plaintext public key, a
plaintext private key, and a copy of the private key that is encrypted under
the symmetric CMK you specify. You can use the data key pair to perform
asymmetric cryptography outside of AWS KMS.

`GenerateDataKeyPair` returns a unique data key pair for each request. The
bytes in the keys are not related to the caller or the CMK that is used to
encrypt the private key.

You can use the public key that `GenerateDataKeyPair` returns to encrypt data
or verify a signature outside of AWS KMS. Then, store the encrypted private
key with the data. When you are ready to decrypt data or sign a message, you
can use the `Decrypt` operation to decrypt the encrypted private key.

To generate a data key pair, you must specify a symmetric customer master key
(CMK) to encrypt the private key in a data key pair. You cannot use an
asymmetric CMK or a CMK in a custom key store. To get the type and origin of
your CMK, use the `DescribeKey` operation.

If you are using the data key pair to encrypt data, or for any operation where
you don't immediately need a private key, consider using the
`GenerateDataKeyPairWithoutPlaintext` operation, which returns a plaintext
public key and an encrypted private key, but omits the plaintext private key
that you need only to decrypt ciphertext or sign a message. Later, when you
need to decrypt the data or sign a message, use the `Decrypt` operation to
decrypt the encrypted private key in the data key pair.

You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.

The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.

**Cross-account use**: Yes. To perform this operation with a CMK in a
different AWS account, specify the key ARN or alias ARN in the value of the
`KeyId` parameter.

**Required permissions**:
[kms:GenerateDataKeyPair](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)

## Related operations:

  * `Decrypt`
  * `Encrypt`
  * `GenerateDataKey`
  * `GenerateDataKeyPairWithoutPlaintext`
  * `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key_pair(%Client{} = client, input, options \\ []) do
  # Thin wrapper around the GenerateDataKeyPair API action.
  client
  |> Request.request_post(metadata(), "GenerateDataKeyPair", input, options)
end
@doc """
Generates a unique asymmetric data key pair.
The `GenerateDataKeyPairWithoutPlaintext` operation returns a plaintext public
key and a copy of the private key that is encrypted under the symmetric CMK you
specify. Unlike `GenerateDataKeyPair`, this operation does not return a
plaintext private key.
To generate a data key pair, you must specify a symmetric customer master key
(CMK) to encrypt the private key in the data key pair. You cannot use an
asymmetric CMK or a CMK in a custom key store. To get the type and origin of
your CMK, use the `KeySpec` field in the `DescribeKey` response.
You can use the public key that `GenerateDataKeyPairWithoutPlaintext` returns to
encrypt data or verify a signature outside of AWS KMS. Then, store the encrypted
private key with the data. When you are ready to decrypt data or sign a message,
you can use the `Decrypt` operation to decrypt the encrypted private key.
`GenerateDataKeyPairWithoutPlaintext` returns a unique data key pair for each
request. The bytes in the key are not related to the caller or CMK that is used
to encrypt the private key.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKeyPairWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyWithoutPlaintext`
"""
def generate_data_key_pair_without_plaintext(%Client{} = client, input, options \\ []) do
  # Thin wrapper: POST the "GenerateDataKeyPairWithoutPlaintext" action to AWS KMS.
  Request.request_post(client, metadata(), "GenerateDataKeyPairWithoutPlaintext", input, options)
end
@doc """
Generates a unique symmetric data key.
This operation returns a data key that is encrypted under a customer master key
(CMK) that you specify. To request an asymmetric data key pair, use the
`GenerateDataKeyPair` or `GenerateDataKeyPairWithoutPlaintext` operations.
`GenerateDataKeyWithoutPlaintext` is identical to the `GenerateDataKey`
operation except that it returns only the encrypted copy of the data key. This
operation is useful for systems that need to encrypt data at some point, but not
immediately. When you need to encrypt the data, you call the `Decrypt` operation
on the encrypted copy of the key.
It's also useful in distributed systems with different levels of trust. For
example, you might store encrypted data in containers. One component of your
system creates new containers and stores an encrypted data key with each
container. Then, a different component puts the data into the containers. That
component first decrypts the data key, uses the plaintext data key to encrypt
data, puts the encrypted data into the container, and then destroys the
plaintext data key. In this system, the component that creates the containers
never sees the plaintext data key.
`GenerateDataKeyWithoutPlaintext` returns a unique data key for each request.
The bytes in the keys are not related to the caller or CMK that is used to
encrypt the private key.
To generate a data key, you must specify the symmetric customer master key (CMK)
that is used to encrypt the data key. You cannot use an asymmetric CMK to
generate a data key. To get the type of your CMK, use the `DescribeKey`
operation.
If the operation succeeds, you will find the encrypted copy of the data key in
the `CiphertextBlob` field.
You can use the optional encryption context to add additional security to the
encryption operation. If you specify an `EncryptionContext`, you must specify
the same encryption context (a case-sensitive exact match) when decrypting the
encrypted data key. Otherwise, the request to decrypt fails with an
`InvalidCiphertextException`. For more information, see [Encryption Context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GenerateDataKeyWithoutPlaintext](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
* `GenerateDataKeyPairWithoutPlaintext`
"""
def generate_data_key_without_plaintext(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GenerateDataKeyWithoutPlaintext", input, options)
end
@doc """
Returns a random byte string that is cryptographically secure.
By default, the random byte string is generated in AWS KMS. To generate the byte
string in the AWS CloudHSM cluster that is associated with a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
specify the custom key store ID.
For more information about entropy and random number generation, see the [AWS Key Management Service Cryptographic
Details](https://d0.awsstatic.com/whitepapers/KMS-Cryptographic-Details.pdf)
whitepaper.
**Required permissions**:
[kms:GenerateRandom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
"""
def generate_random(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "GenerateRandom", input, options)
end
@doc """
Gets a key policy attached to the specified customer master key (CMK).
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:GetKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `PutKeyPolicy`
"""
def get_key_policy(%Client{} = client, input, options \\ []) do
  action = "GetKeyPolicy"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Gets a Boolean value that indicates whether [automatic rotation of the key material](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html)
is enabled for the specified customer master key (CMK).
You cannot enable automatic rotation of asymmetric CMKs, CMKs with imported key
material, or CMKs in a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html).
The key rotation status for these CMKs is always `false`.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
* Disabled: The key rotation status does not change when you disable
a CMK. However, while the CMK is disabled, AWS KMS does not rotate the backing
key.
* Pending deletion: While a CMK is pending deletion, its key
rotation status is `false` and AWS KMS does not rotate the backing key. If you
cancel the deletion, the original key rotation status is restored.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:GetKeyRotationStatus](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DisableKeyRotation`
* `EnableKeyRotation`
"""
def get_key_rotation_status(%Client{} = client, input, options \\ []) do
  meta = metadata()
  action = "GetKeyRotationStatus"
  Request.request_post(client, meta, action, input, options)
end
@doc """
Returns the items you need to import key material into a symmetric, customer
managed customer master key (CMK).
For more information about importing key material into AWS KMS, see [Importing Key
Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
This operation returns a public key and an import token. Use the public key to
encrypt the symmetric key material. Store the import token to send with a
subsequent `ImportKeyMaterial` request.
You must specify the key ID of the symmetric CMK into which you will import key
material. This CMK's `Origin` must be `EXTERNAL`. You must also specify the
wrapping algorithm and type of wrapping key (public key) that you will use to
encrypt the key material. You cannot perform this operation on an asymmetric CMK
or on any CMK in a different AWS account.
To import key material, you must use the public key and import token from the
same response. These items are valid for 24 hours. The expiration date and time
appear in the `GetParametersForImport` response. You cannot use an expired token
in an `ImportKeyMaterial` request. If your key and token expire, send another
`GetParametersForImport` request.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:GetParametersForImport](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `ImportKeyMaterial`
* `DeleteImportedKeyMaterial`
"""
def get_parameters_for_import(%Client{} = client, input, options \\ []) do
  action = "GetParametersForImport"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns the public key of an asymmetric CMK.
Unlike the private key of a asymmetric CMK, which never leaves AWS KMS
unencrypted, callers with `kms:GetPublicKey` permission can download the public
key of an asymmetric CMK. You can share the public key to allow others to
encrypt messages and verify signatures outside of AWS KMS. For information about
symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
You do not need to download the public key. Instead, you can use the public key
within AWS KMS by calling the `Encrypt`, `ReEncrypt`, or `Verify` operations
with the identifier of an asymmetric CMK. When you use the public key within AWS
KMS, you benefit from the authentication, authorization, and logging that are
part of every AWS KMS operation. You also reduce the risk of encrypting data that
cannot be decrypted. These features are not effective outside of AWS KMS. For
details, see [Special Considerations for Downloading Public Keys](https://docs.aws.amazon.com/kms/latest/developerguide/download-public-key.html#download-public-key-considerations).
To help you use the public key safely outside of AWS KMS, `GetPublicKey` returns
important information about the public key in the response, including:
*
[CustomerMasterKeySpec](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-CustomerMasterKeySpec): The type of key material in the public key, such as `RSA_4096` or
`ECC_NIST_P521`.
*
[KeyUsage](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-KeyUsage):
Whether the key is used for encryption or signing.
*
[EncryptionAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-EncryptionAlgorithms) or
[SigningAlgorithms](https://docs.aws.amazon.com/kms/latest/APIReference/API_GetPublicKey.html#KMS-GetPublicKey-response-SigningAlgorithms):
A list of the encryption algorithms or the signing algorithms for the key.
Although AWS KMS cannot enforce these restrictions on external operations, it is
crucial that you use this information to prevent the public key from being used
improperly. For example, you can prevent a public signing key from being used
to encrypt data, or prevent a public key from being used with an encryption
algorithm that is not supported by AWS KMS. You can also avoid errors, such as
using the wrong signing algorithm in a verification operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:GetPublicKey](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `CreateKey`
"""
def get_public_key(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetPublicKey", input, options)
end
@doc """
Imports key material into an existing symmetric AWS KMS customer master key
(CMK) that was created without key material.
After you successfully import key material into a CMK, you can [reimport the same key
material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#reimport-key-material)
into that CMK, but you cannot import different key material.
You cannot perform this operation on an asymmetric CMK or on any CMK in a
different AWS account. For more information about creating CMKs with no key
material and then importing key material, see [Importing Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
Before using this operation, call `GetParametersForImport`. Its response
includes a public key and an import token. Use the public key to encrypt the key
material. Then, submit the import token from the same `GetParametersForImport`
response.
When calling this operation, you must specify the following values:
* The key ID or key ARN of a CMK with no key material. Its `Origin`
must be `EXTERNAL`.
To create a CMK with no key material, call `CreateKey` and set the value of its
`Origin` parameter to `EXTERNAL`. To get the `Origin` of a CMK, call
`DescribeKey`.
* The encrypted key material. To get the public key to encrypt the
key material, call `GetParametersForImport`.
* The import token that `GetParametersForImport` returned. You must
use a public key and token from the same `GetParametersForImport` response.
* Whether the key material expires and if so, when. If you set an
expiration date, AWS KMS deletes the key material from the CMK on the specified
date, and the CMK becomes unusable. To use the CMK again, you must reimport the
same key material. The only way to change an expiration date is by reimporting
the same key material and specifying a new expiration date.
When this operation is successful, the key state of the CMK changes from
`PendingImport` to `Enabled`, and you can use the CMK.
If this operation fails, use the exception to help determine the problem. If the
error is related to the key material, the import token, or wrapping key, use
`GetParametersForImport` to get a new public key and import token for the CMK
and repeat the import procedure. For help, see [How To Import Key Material](https://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html#importing-keys-overview)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ImportKeyMaterial](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `DeleteImportedKeyMaterial`
* `GetParametersForImport`
"""
def import_key_material(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ImportKeyMaterial", input, options)
end
@doc """
Gets a list of aliases in the caller's AWS account and region.
For more information about aliases, see `CreateAlias`.
By default, the `ListAliases` operation returns all aliases in the account and
region. To get only the aliases associated with a particular customer master key
(CMK), use the `KeyId` parameter.
The `ListAliases` response can include aliases that you created and associated
with your customer managed CMKs, and aliases that AWS created and associated
with AWS managed CMKs in your account. You can recognize AWS aliases because
their names have the format `aws/<service-name>`, such as `aws/dynamodb`.
The response might also include aliases that have no `TargetKeyId` field. These
are predefined aliases that AWS has created but has not yet associated with a
CMK. Aliases that AWS creates in your account, including predefined aliases, do
not count against your [AWS KMS aliases quota](https://docs.aws.amazon.com/kms/latest/developerguide/limits.html#aliases-limit).
**Cross-account use**: No. `ListAliases` does not return aliases in other AWS
accounts.
**Required permissions**:
[kms:ListAliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) (IAM policy)
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `DeleteAlias`
* `UpdateAlias`
"""
def list_aliases(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ListAliases", input, options)
end
@doc """
Gets a list of all grants for the specified customer master key (CMK).
You must specify the CMK in all requests. You can filter the grant list by grant
ID or grantee principal.
The `GranteePrincipal` field in the `ListGrants` response usually contains the
user or role designated as the grantee principal in the grant. However, when the
grantee principal in the grant is an AWS service, the `GranteePrincipal` field
contains the [service principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html#principal-services),
which might represent several different grantee principals.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:ListGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateGrant`
* `ListRetirableGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_grants(%Client{} = client, input, options \\ []) do
  action = "ListGrants"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Gets the names of the key policies that are attached to a customer master key
(CMK).
This operation is designed to get policy names that you can use in a
`GetKeyPolicy` operation. However, the only valid policy name is `default`.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListKeyPolicies](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `GetKeyPolicy`
* `PutKeyPolicy`
"""
def list_key_policies(%Client{} = client, input, options \\ []) do
  meta = metadata()
  action = "ListKeyPolicies"
  Request.request_post(client, meta, action, input, options)
end
@doc """
Gets a list of all customer master keys (CMKs) in the caller's AWS account and
Region.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListKeys](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `CreateKey`
* `DescribeKey`
* `ListAliases`
* `ListResourceTags`
"""
def list_keys(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ListKeys", input, options)
end
@doc """
Returns all tags on the specified customer master key (CMK).
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ListResourceTags](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `TagResource`
* `UntagResource`
"""
def list_resource_tags(%Client{} = client, input, options \\ []) do
  action = "ListResourceTags"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns all grants in which the specified principal is the `RetiringPrincipal`
in the grant.
You can specify any principal in your AWS account. The grants that are returned
include grants for CMKs in your AWS account and other AWS accounts.
You might use this operation to determine which grants you may retire. To retire
a grant, use the `RetireGrant` operation.
**Cross-account use**: You must specify a principal in your AWS account.
However, this operation can return grants in any AWS account. You do not need
`kms:ListRetirableGrants` permission (or any other additional permission) in any
AWS account other than your own.
**Required permissions**:
[kms:ListRetirableGrants](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy) in your AWS account.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `RetireGrant`
* `RevokeGrant`
"""
def list_retirable_grants(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "ListRetirableGrants", input, options)
end
@doc """
Attaches a key policy to the specified customer master key (CMK).
For more information about key policies, see [Key Policies](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
in the *AWS Key Management Service Developer Guide*. For help writing and
formatting a JSON policy document, see the [IAM JSON Policy Reference](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies.html)
in the * *IAM User Guide* *. For examples of adding a key policy in multiple
programming languages, see [Setting a key policy](https://docs.aws.amazon.com/kms/latest/developerguide/programming-key-policies.html#put-policy)
in the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:PutKeyPolicy](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `GetKeyPolicy`
"""
def put_key_policy(%Client{} = client, input, options \\ []) do
  action = "PutKeyPolicy"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Decrypts ciphertext and then reencrypts it entirely within AWS KMS.
You can use this operation to change the customer master key (CMK) under which
data is encrypted, such as when you [manually rotate](https://docs.aws.amazon.com/kms/latest/developerguide/rotate-keys.html#rotate-keys-manually)
a CMK or change the CMK that protects a ciphertext. You can also use it to
reencrypt ciphertext under the same CMK, such as to change the [encryption context](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context)
of a ciphertext.
The `ReEncrypt` operation can decrypt ciphertext that was encrypted by using an
AWS KMS CMK in an AWS KMS operation, such as `Encrypt` or `GenerateDataKey`. It
can also decrypt ciphertext that was encrypted by using the public key of an
[asymmetric CMK](https://docs.aws.amazon.com/kms/latest/developerguide/symm-asymm-concepts.html#asymmetric-cmks)
outside of AWS KMS. However, it cannot decrypt ciphertext produced by other
libraries, such as the [AWS Encryption SDK](https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/) or
[Amazon S3 client-side encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingClientSideEncryption.html).
These libraries return a ciphertext format that is incompatible with AWS KMS.
When you use the `ReEncrypt` operation, you need to provide information for the
decrypt operation and the subsequent encrypt operation.
* If your ciphertext was encrypted under an asymmetric CMK, you must
use the `SourceKeyId` parameter to identify the CMK that encrypted the
ciphertext. You must also supply the encryption algorithm that was used. This
information is required to decrypt the data.
* If your ciphertext was encrypted under a symmetric CMK, the
`SourceKeyId` parameter is optional. AWS KMS can get this information from
metadata that it adds to the symmetric ciphertext blob. This feature adds
durability to your implementation by ensuring that authorized users can decrypt
ciphertext decades after it was encrypted, even if they've lost track of the CMK
ID. However, specifying the source CMK is always recommended as a best practice.
When you use the `SourceKeyId` parameter to specify a CMK, AWS KMS uses only the
CMK you specify. If the ciphertext was encrypted under a different CMK, the
`ReEncrypt` operation fails. This practice ensures that you use the CMK that you
intend.
* To reencrypt the data, you must use the `DestinationKeyId`
parameter to specify the CMK that re-encrypts the data after it is decrypted. You
can select a symmetric or asymmetric CMK. If the destination CMK is an
asymmetric CMK, you must also provide the encryption algorithm. The algorithm
that you choose must be compatible with the CMK.
When you use an asymmetric CMK to encrypt or reencrypt data, be sure to record
the CMK and encryption algorithm that you choose. You will be required to
provide the same CMK and encryption algorithm when you decrypt the data. If the
CMK and algorithm do not match the values used to encrypt the data, the decrypt
operation fails.
You are not required to supply the CMK ID and encryption algorithm when you
decrypt with symmetric CMKs because AWS KMS stores this information in the
ciphertext blob. AWS KMS cannot store metadata in ciphertext generated with
asymmetric keys. The standard format for asymmetric key ciphertext does not
include configurable fields.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. The source CMK and destination CMK can be in
different AWS accounts. Either or both CMKs can be in a different account than
the caller.
**Required permissions**:
*
[kms:ReEncryptFrom](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) permission on the source CMK (key policy)
*
[kms:ReEncryptTo](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
permission on the destination CMK (key policy)
To permit reencryption from or to a CMK, include the `"kms:ReEncrypt*"`
permission in your [key policy](https://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html).
This permission is automatically included in the key policy when you use the
console to create a CMK. But you must include it manually when you create a CMK
programmatically or when you use the `PutKeyPolicy` operation to set a key
policy.
## Related operations:
* `Decrypt`
* `Encrypt`
* `GenerateDataKey`
* `GenerateDataKeyPair`
"""
def re_encrypt(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ReEncrypt", input, options)
end
@doc """
Retires a grant.
To clean up, you can retire a grant when you're done using it. You should revoke
a grant when you intend to actively deny operations that depend on it. The
following are permitted to call this API:
* The AWS account (root user) under which the grant was created
* The `RetiringPrincipal`, if present in the grant
* The `GranteePrincipal`, if `RetireGrant` is an operation specified
in the grant
You must identify the grant to retire by its grant token or by a combination of
the grant ID and the Amazon Resource Name (ARN) of the customer master key
(CMK). A grant token is a unique variable-length base64-encoded string. A grant
ID is a 64 character unique identifier of a grant. The `CreateGrant` operation
returns both.
**Cross-account use**: Yes. You can retire a grant on a CMK in a different AWS
account.
**Required permissions:**: Permission to retire a grant is specified in the
grant. You cannot control access to this operation in a policy. For more
information, see [Using grants](https://docs.aws.amazon.com/kms/latest/developerguide/grants.html) in
the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RevokeGrant`
"""
def retire_grant(%Client{} = client, input, options \\ []) do
  meta = metadata()
  action = "RetireGrant"
  Request.request_post(client, meta, action, input, options)
end
@doc """
Revokes the specified grant for the specified customer master key (CMK).
You can revoke a grant to actively deny operations that depend on it.
**Cross-account use**: Yes. To perform this operation on a CMK in a different
AWS account, specify the key ARN in the value of the `KeyId` parameter.
**Required permissions**:
[kms:RevokeGrant](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations:
* `CreateGrant`
* `ListGrants`
* `ListRetirableGrants`
* `RetireGrant`
"""
def revoke_grant(%Client{} = client, input, options \\ []) do
  meta = metadata()
  Request.request_post(client, meta, "RevokeGrant", input, options)
end
@doc """
Schedules the deletion of a customer master key (CMK).
You may provide a waiting period, specified in days, before deletion occurs. If
you do not provide a waiting period, the default period of 30 days is used. When
this operation is successful, the key state of the CMK changes to
`PendingDeletion`. Before the waiting period ends, you can use
`CancelKeyDeletion` to cancel the deletion of the CMK. After the waiting period
ends, AWS KMS deletes the CMK and all AWS KMS data associated with it, including
all aliases that refer to it.
Deleting a CMK is a destructive and potentially dangerous operation. When a CMK
is deleted, all data that was encrypted under the CMK is unrecoverable. To
prevent the use of a CMK without deleting it, use `DisableKey`.
If you schedule deletion of a CMK from a [custom key store](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html),
when the waiting period expires, `ScheduleKeyDeletion` deletes the CMK from AWS
KMS. Then AWS KMS makes a best effort to delete the key material from the
associated AWS CloudHSM cluster. However, you might need to manually [delete the orphaned key
material](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-orphaned-key)
from the cluster and its backups.
For more information about scheduling a CMK for deletion, see [Deleting Customer Master
Keys](https://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:ScheduleKeyDeletion](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CancelKeyDeletion`
* `DisableKey`
"""
def schedule_key_deletion(%Client{} = client, input, options \\ []) do
  action = "ScheduleKeyDeletion"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates a [digital signature](https://en.wikipedia.org/wiki/Digital_signature) for a message or message digest by using the private key in an asymmetric CMK.
To verify the signature, use the `Verify` operation, or use the public key in
the same asymmetric CMK outside of AWS KMS. For information about symmetric and
asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
Digital signatures are generated and verified by using an asymmetric key pair, such
as an RSA or ECC pair that is represented by an asymmetric customer master key
(CMK). The key owner (or an authorized user) uses their private key to sign a
message. Anyone with the public key can verify that the message was signed with
that particular private key and that the message hasn't changed since it was
signed.
To use the `Sign` operation, provide the following information:
* Use the `KeyId` parameter to identify an asymmetric CMK with a
`KeyUsage` value of `SIGN_VERIFY`. To get the `KeyUsage` value of a CMK, use the
`DescribeKey` operation. The caller must have `kms:Sign` permission on the CMK.
* Use the `Message` parameter to specify the message or message
digest to sign. You can submit messages of up to 4096 bytes. To sign a larger
message, generate a hash digest of the message, and then provide the hash digest
in the `Message` parameter. To indicate whether the message is a full message or
a digest, use the `MessageType` parameter.
* Choose a signing algorithm that is compatible with the CMK.
When signing a message, be sure to record the CMK and the signing algorithm.
This information is required to verify the signature.
To verify the signature that this operation generates, use the `Verify`
operation. Or use the `GetPublicKey` operation to download the public key and
then use the public key to verify the signature outside of AWS KMS.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:Sign](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Verify`
"""
# Thin wrapper: posts the "Sign" action through the shared request helper.
def sign(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "Sign", input, options)
@doc """
Adds or edits tags on a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
Each tag consists of a tag key and a tag value, both of which are case-sensitive
strings. The tag value can be an empty (null) string.
To add a tag, specify a new tag key and a tag value. To edit a tag, specify an
existing tag key and a new tag value.
You can use this operation to tag a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk),
but you cannot tag an [AWS managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk),
an [AWS owned CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-owned-cmk),
or an alias.
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:TagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `UntagResource`
* `ListResourceTags`
"""
# Thin wrapper: posts the "TagResource" action through the shared request helper.
def tag_resource(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "TagResource", input, options)
@doc """
Deletes tags from a [customer managed CMK](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#customer-cmk).
To delete a tag, specify the tag key and the CMK.
When it succeeds, the `UntagResource` operation doesn't return any output. Also,
if the specified tag key isn't found on the CMK, it doesn't throw an exception
or return a response. To confirm that the operation worked, use the
`ListResourceTags` operation.
For general information about tags, including the format and syntax, see
[Tagging AWS resources](https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html) in
the *Amazon Web Services General Reference*. For information about using tags in
AWS KMS, see [Tagging keys](https://docs.aws.amazon.com/kms/latest/developerguide/tagging-keys.html).
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:UntagResource](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `TagResource`
* `ListResourceTags`
"""
# Thin wrapper: posts the "UntagResource" action through the shared request helper.
def untag_resource(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UntagResource", input, options)
@doc """
Associates an existing AWS KMS alias with a different customer master key (CMK).
Each alias is associated with only one CMK at a time, although a CMK can have
multiple aliases. The alias and the CMK must be in the same AWS account and
region.
The current and new CMK must be the same type (both symmetric or both
asymmetric), and they must have the same key usage (`ENCRYPT_DECRYPT` or
`SIGN_VERIFY`). This restriction prevents errors in code that uses aliases. If
you must assign an alias to a different type of CMK, use `DeleteAlias` to delete
the old alias and `CreateAlias` to create a new alias.
You cannot use `UpdateAlias` to change an alias name. To change an alias name,
use `DeleteAlias` to delete the old alias and `CreateAlias` to create a new
alias.
Because an alias is not a property of a CMK, you can create, update, and delete
the aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all CMKs in
the account, use the `ListAliases` operation.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
## Required permissions
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the alias (IAM policy).
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
on the current CMK (key policy).
*
[kms:UpdateAlias](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html) on the new CMK (key policy).
For details, see [Controlling access to
aliases](https://docs.aws.amazon.com/kms/latest/developerguide/kms-alias.html#alias-access)
in the *AWS Key Management Service Developer Guide*.
## Related operations:
* `CreateAlias`
* `DeleteAlias`
* `ListAliases`
"""
# Thin wrapper: posts the "UpdateAlias" action through the shared request helper.
def update_alias(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UpdateAlias", input, options)
@doc """
Changes the properties of a custom key store.
Use the `CustomKeyStoreId` parameter to identify the custom key store you want
to edit. Use the remaining parameters to change the properties of the custom key
store.
You can only update a custom key store that is disconnected. To disconnect the
custom key store, use `DisconnectCustomKeyStore`. To reconnect the custom key
store after the update completes, use `ConnectCustomKeyStore`. To find the
connection state of a custom key store, use the `DescribeCustomKeyStores`
operation.
Use the parameters of `UpdateCustomKeyStore` to edit your keystore settings.
* Use the **NewCustomKeyStoreName** parameter to change the friendly
name of the custom key store to the value that you specify.
* Use the **KeyStorePassword** parameter to tell AWS KMS the current
password of the [ `kmsuser` crypto user (CU)](https://docs.aws.amazon.com/kms/latest/developerguide/key-store-concepts.html#concept-kmsuser)
in the associated AWS CloudHSM cluster. You can use this parameter to [fix connection
failures](https://docs.aws.amazon.com/kms/latest/developerguide/fix-keystore.html#fix-keystore-password)
that occur when AWS KMS cannot log into the associated cluster because the
`kmsuser` password has changed. This value does not change the password in the
AWS CloudHSM cluster.
* Use the **CloudHsmClusterId** parameter to associate the custom
key store with a different, but related, AWS CloudHSM cluster. You can use this
parameter to repair a custom key store if its AWS CloudHSM cluster becomes
corrupted or is deleted, or when you need to create or restore a cluster from a
backup.
If the operation succeeds, it returns a JSON object with no properties.
This operation is part of the [Custom Key Store feature](https://docs.aws.amazon.com/kms/latest/developerguide/custom-key-store-overview.html)
in AWS KMS, which combines the convenience and extensive integration of
AWS KMS with the isolation and control of a single-tenant key store.
**Cross-account use**: No. You cannot perform this operation on a custom key
store in a different AWS account.
**Required permissions**:
[kms:UpdateCustomKeyStore](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(IAM policy)
## Related operations:
* `ConnectCustomKeyStore`
* `CreateCustomKeyStore`
* `DeleteCustomKeyStore`
* `DescribeCustomKeyStores`
* `DisconnectCustomKeyStore`
"""
# Thin wrapper: posts the "UpdateCustomKeyStore" action through the shared request helper.
def update_custom_key_store(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UpdateCustomKeyStore", input, options)
@doc """
Updates the description of a customer master key (CMK).
To see the description of a CMK, use `DescribeKey`.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: No. You cannot perform this operation on a CMK in a
different AWS account.
**Required permissions**:
[kms:UpdateKeyDescription](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
## Related operations
* `CreateKey`
* `DescribeKey`
"""
# Thin wrapper: posts the "UpdateKeyDescription" action through the shared request helper.
def update_key_description(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UpdateKeyDescription", input, options)
@doc """
Verifies a digital signature that was generated by the `Sign` operation.
Verification confirms that an authorized user signed the message with the
specified CMK and signing algorithm, and the message hasn't changed since it was
signed. If the signature is verified, the value of the `SignatureValid` field in
the response is `True`. If the signature verification fails, the `Verify`
operation fails with an `KMSInvalidSignatureException` exception.
A digital signature is generated by using the private key in an asymmetric CMK.
The signature is verified by using the public key in the same asymmetric CMK.
For information about symmetric and asymmetric CMKs, see [Using Symmetric and Asymmetric
CMKs](https://docs.aws.amazon.com/kms/latest/developerguide/symmetric-asymmetric.html)
in the *AWS Key Management Service Developer Guide*.
To verify a digital signature, you can use the `Verify` operation. Specify the
same asymmetric CMK, message, and signing algorithm that were used to produce
the signature.
You can also verify the digital signature by using the public key of the CMK
outside of AWS KMS. Use the `GetPublicKey` operation to download the public key
in the asymmetric CMK and then use the public key to verify the signature
outside of AWS KMS. The advantage of using the `Verify` operation is that it is
performed within AWS KMS. As a result, it's easy to call, the operation is
performed within the FIPS boundary, it is logged in AWS CloudTrail, and you can
use key policy and IAM policy to determine who is authorized to use the CMK to
verify signatures.
The CMK that you use for this operation must be in a compatible key state. For
details, see [How Key State Affects Use of a Customer Master Key](https://docs.aws.amazon.com/kms/latest/developerguide/key-state.html) in
the *AWS Key Management Service Developer Guide*.
**Cross-account use**: Yes. To perform this operation with a CMK in a different
AWS account, specify the key ARN or alias ARN in the value of the `KeyId`
parameter.
**Required permissions**:
[kms:Verify](https://docs.aws.amazon.com/kms/latest/developerguide/kms-api-permissions-reference.html)
(key policy)
**Related operations**: `Sign`
"""
# Thin wrapper: posts the "Verify" action through the shared request helper.
def verify(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "Verify", input, options)
end
| 45.107253 | 215 | 0.753262 |
ff0d60539c413c2bf5983929e5abbb441d738a7c | 1,470 | ex | Elixir | lib/xdr/transactions/operations/set_options_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | lib/xdr/transactions/operations/set_options_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | lib/xdr/transactions/operations/set_options_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.SetOptionsResult do
@moduledoc """
Representation of Stellar `SetOptionsResult` type.
"""
alias StellarBase.XDR.Void
alias StellarBase.XDR.Operations.SetOptionsResultCode
@behaviour XDR.Declaration
@arms [SET_OPTIONS_SUCCESS: Void, default: Void]
@type t :: %__MODULE__{result: any(), code: SetOptionsResultCode.t()}
defstruct [:result, :code]
@spec new(result :: any(), code :: SetOptionsResultCode.t()) :: t()
def new(result, %SetOptionsResultCode{} = code),
do: %__MODULE__{result: result, code: code}
@impl true
def encode_xdr(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr()
end
@impl true
def encode_xdr!(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr!()
end
@impl true
def decode_xdr(bytes, spec \\ union_spec())
def decode_xdr(bytes, spec) do
case XDR.Union.decode_xdr(bytes, spec) do
{:ok, {{code, result}, rest}} -> {:ok, {new(result, code), rest}}
error -> error
end
end
@impl true
def decode_xdr!(bytes, spec \\ union_spec())
def decode_xdr!(bytes, spec) do
{{code, result}, rest} = XDR.Union.decode_xdr!(bytes, spec)
{new(result, code), rest}
end
@spec union_spec() :: XDR.Union.t()
defp union_spec do
nil
|> SetOptionsResultCode.new()
|> XDR.Union.new(@arms)
end
end
| 24.915254 | 71 | 0.660544 |
ff0d6558ccc04146702b5c82ae29c716c0e7e020 | 1,073 | exs | Elixir | test/bootleg/config/agent_test.exs | jayjun/bootleg | 2283ad22810178625c45eb2661624fac914befdd | [
"MIT"
] | 423 | 2017-07-19T18:40:45.000Z | 2021-12-03T08:26:15.000Z | test/bootleg/config/agent_test.exs | jayjun/bootleg | 2283ad22810178625c45eb2661624fac914befdd | [
"MIT"
] | 115 | 2017-07-19T18:30:14.000Z | 2020-10-27T20:53:32.000Z | test/bootleg/config/agent_test.exs | jayjun/bootleg | 2283ad22810178625c45eb2661624fac914befdd | [
"MIT"
] | 39 | 2017-07-20T03:14:18.000Z | 2020-11-18T13:39:52.000Z | defmodule Bootleg.Config.AgentTest do
use Bootleg.TestCase, async: false
alias Bootleg.Config.Agent
import ExUnit.CaptureIO
test "stores values for retrieval" do
{:ok, _} = Agent.start_link()
assert Agent.get(:config) == [env: :production]
Agent.put(:config, key: :value, key2: :value)
assert Agent.get(:config) == [key: :value, key2: :value]
Agent.merge(:config, :foo, :bar)
assert Agent.get(:config) == [key: :value, key2: :value, foo: :bar]
end
test "start_link/0 ignores 'already started' errors" do
Agent.start_link()
assert {:ok, _} = Agent.start_link()
end
test "start_link/1 ignores 'already started' errors" do
Agent.start_link()
assert {:ok, _} = Agent.start_link(:foo)
end
test "start_link/0 sets the environment to `production`" do
Agent.start_link()
assert Agent.get(:config) == [env: :production]
end
test "start_link/1 sets the environment to the provided env" do
capture_io(fn ->
Agent.start_link(:bar)
end)
assert Agent.get(:config) == [env: :bar]
end
end
| 26.825 | 71 | 0.662628 |
ff0d6e1b1fb43f0b01f8997ac937a055a46e23d1 | 425 | exs | Elixir | test/nuzzelish_web/views/error_view_test.exs | chrisbodhi/nuzzelish | 4273dc34e5cc2eab11a6c512272b03a60f302e64 | [
"MIT"
] | 2 | 2020-09-07T03:42:36.000Z | 2021-05-04T23:58:43.000Z | test/nuzzelish_web/views/error_view_test.exs | chrisbodhi/nuzzelish | 4273dc34e5cc2eab11a6c512272b03a60f302e64 | [
"MIT"
] | 20 | 2020-01-15T03:26:48.000Z | 2020-02-02T20:53:39.000Z | test/nuzzelish_web/views/error_view_test.exs | chrisbodhi/nuzzelish | 4273dc34e5cc2eab11a6c512272b03a60f302e64 | [
"MIT"
] | null | null | null | defmodule NuzzelishWeb.ErrorViewTest do
use NuzzelishWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(NuzzelishWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(NuzzelishWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 28.333333 | 94 | 0.738824 |
ff0d715b3092bab337aa4ac8e7b52bf16946f539 | 2,726 | exs | Elixir | test/ex_oauth2_provider/plug_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/plug_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/plug_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | defmodule ExOauth2Provider.PlugTest do
use ExOauth2Provider.ConnCase
alias ExOauth2Provider.Plug
test "authenticated?/1", context do
refute Plug.authenticated?(context.conn)
new_conn = Plug.set_current_access_token(context.conn, {:ok, "secret"})
assert Plug.authenticated?(new_conn)
end
test "authenticated?/2", context do
refute Plug.authenticated?(context.conn, :secret)
new_conn = Plug.set_current_access_token(context.conn, {:ok, "secret"}, :secret)
assert Plug.authenticated?(new_conn, :secret)
end
test "current_resource_owner/1 with no resource", context do
assert Plug.current_resource_owner(context.conn) == nil
end
test "current_resource_owner/1 with error", context do
new_conn = Plug.set_current_access_token(context.conn, {:error, :error})
assert Plug.current_resource_owner(new_conn) == nil
end
test "current_resource_owner/1 with resource", context do
new_conn = Plug.set_current_access_token(context.conn, {:ok, %{resource_owner: "user"}})
assert Plug.current_resource_owner(new_conn) == "user"
end
test "current_resource_owner/2 with no resource", context do
assert Plug.current_resource_owner(context.conn, :secret) == nil
end
test "current_resource_owner/2 with resource", context do
new_conn =
Plug.set_current_access_token(context.conn, {:ok, %{resource_owner: "user"}}, :secret)
assert Plug.current_resource_owner(new_conn, :secret) == "user"
end
test "set_current_access_token/2", context do
new_conn = Plug.set_current_access_token(context.conn, {:ok, "token"})
assert Plug.current_access_token(new_conn) == "token"
end
test "set_current_access_token/3", context do
new_conn = Plug.set_current_access_token(context.conn, {:ok, "token"}, :secret)
assert Plug.current_access_token(new_conn, :secret) == "token"
end
test "current_access_token/1 with no token", context do
assert Plug.current_access_token(context.conn) == nil
end
test "current_access_token/1 with token", context do
new_conn = Plug.set_current_access_token(context.conn, {:ok, "token"})
assert Plug.current_access_token(new_conn) == "token"
end
test "current_access_token/1 with error", context do
new_conn = Plug.set_current_access_token(context.conn, {:error, :error})
assert Plug.current_access_token(new_conn) == nil
end
test "current_access_token/2 with no token", context do
assert Plug.current_access_token(context.conn, :secret) == nil
end
test "current_access_token/2 with token", context do
new_conn = Plug.set_current_access_token(context.conn, {:ok, "token"}, :secret)
assert Plug.current_access_token(new_conn, :secret) == "token"
end
end
| 35.868421 | 92 | 0.739545 |
ff0d719fb407aa34ae7db62dc2ae04c86d6725bb | 1,133 | exs | Elixir | config/config.exs | gmalette/droppable_queue | 73fd1faa3d91acc6b6246eb80715ffb6ceaa705f | [
"MIT"
] | 1 | 2016-11-15T19:54:13.000Z | 2016-11-15T19:54:13.000Z | config/config.exs | gmalette/droppable_queue | 73fd1faa3d91acc6b6246eb80715ffb6ceaa705f | [
"MIT"
] | null | null | null | config/config.exs | gmalette/droppable_queue | 73fd1faa3d91acc6b6246eb80715ffb6ceaa705f | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :droppable_queue, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:droppable_queue, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.548387 | 73 | 0.754634 |
ff0d96c2e12ca0b1c524565f4a27461f910eb21a | 3,372 | exs | Elixir | test/dnsimple/response_test.exs | remiprev/dnsimple-elixir | 7a8eb0cec64797f3e72601b5395d8629836ef904 | [
"MIT"
] | null | null | null | test/dnsimple/response_test.exs | remiprev/dnsimple-elixir | 7a8eb0cec64797f3e72601b5395d8629836ef904 | [
"MIT"
] | 14 | 2021-02-19T07:09:55.000Z | 2022-02-24T12:33:37.000Z | test/dnsimple/response_test.exs | littleairmada/dnsimple-elixir | a1b71a9c84d1a440f86199b8f48754e1c88ca19f | [
"MIT"
] | null | null | null | defmodule Dnsimple.ResponseTest do
use TestCase, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
doctest Dnsimple.Response
@client %Dnsimple.Client{access_token: "x", base_url: "https://api.dnsimple.test"}
describe ".parse" do
test "extracts the response body into structs WITH a data attribute" do
use_cassette :stub, ExvcrUtils.response_fixture("getDomain/success.http", method: "get") do
http_response = Dnsimple.Client.execute(@client, "get", "/v2/1010/domains/1")
{:ok, response} = Dnsimple.Response.parse(http_response, %{"data" => %Dnsimple.Domain{}})
assert response.data.__struct__ == Dnsimple.Domain
end
end
test "extracts the response body into NESTED structs WITH a data attribute" do
use_cassette :stub, ExvcrUtils.response_fixture("getTldExtendedAttributes/success.http", method: "get") do
http_response = Dnsimple.Client.execute(@client, "get", "/v2/tlds/com/extended_attributes")
{:ok, response} = Dnsimple.Response.parse(http_response, %{"data" => [%Dnsimple.TldExtendedAttribute{options: [%Dnsimple.TldExtendedAttribute.Option{}]}]})
[attribute | _] = response.data
assert attribute.__struct__ == Dnsimple.TldExtendedAttribute
[option | _] = attribute.options
assert option.__struct__ == Dnsimple.TldExtendedAttribute.Option
end
end
test "extracts the response body into structs WITHOUT a data attribute" do
use_cassette :stub, ExvcrUtils.response_fixture("oauthAccessToken/success.http", method: "post") do
http_response = Dnsimple.Client.execute(@client, "post", "/oauth/access_token")
{:ok, response} = Dnsimple.Response.parse(http_response, %Dnsimple.OauthToken{})
assert response.data.__struct__ == Dnsimple.OauthToken
end
end
test "parses a response without extracting data" do
use_cassette :stub, ExvcrUtils.response_fixture("deleteDomain/success.http", method: "delete") do
http_response = {:ok, http} = Dnsimple.Client.execute(@client, "delete", "/path")
{:ok, response} = Dnsimple.Response.parse(http_response, nil)
assert response.http_response == http
assert response.data == nil
end
end
test "parses the rate-limit" do
use_cassette :stub, ExvcrUtils.response_fixture("whoami/success.http", method: "get") do
http_response = Dnsimple.Client.execute(@client, "get", "/path")
{:ok, response} = Dnsimple.Response.parse(http_response, nil)
assert response.rate_limit == 4000
assert response.rate_limit_remaining == 3991
assert response.rate_limit_reset == 1450451976
end
end
test "parses pagination" do
use_cassette :stub, ExvcrUtils.response_fixture("pages-1of3.http", method: "get") do
http_response = Dnsimple.Client.execute(@client, "get", "/path")
{:ok, response} = Dnsimple.Response.parse(http_response, %{"data" => [%Dnsimple.Domain{}], "pagination" => %Dnsimple.Response.Pagination{}})
assert response.data != nil
assert response.pagination != nil
assert response.pagination.current_page == 1
assert response.pagination.per_page == 2
assert response.pagination.total_pages == 3
assert response.pagination.total_entries == 5
end
end
end
end
| 43.230769 | 163 | 0.688909 |
ff0da5ee1963710ec8b242d389e4485b6c3bd24d | 952 | exs | Elixir | test/test_helper.exs | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | test/test_helper.exs | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | test/test_helper.exs | lumenlunae/forage | af85889bb5a2e154d1aa92e98454c29b6d724120 | [
"MIT"
] | null | null | null | defmodule TestSchemas.PrimarySchema do
use Ecto.Schema
import Ecto.Changeset
alias TestSchemas.RemoteSchema
schema "dummy_schema" do
field :string_field, :string
field :integer_field, :integer
belongs_to :owner, RemoteSchema
end
@doc false
def changeset(remote_schema, attrs) do
remote_schema
|> cast(attrs, [:string_field, :integer_field])
|> validate_required([:string_field, :integer_field])
end
end
# Schema used as the associated ("remote") side in the test suite; has
# many PrimarySchema children.
defmodule TestSchemas.RemoteSchema do
  use Ecto.Schema
  import Ecto.Changeset

  alias TestSchemas.PrimarySchema

  # Fields that are both cast and required in the changeset below.
  @required_fields [:remote_string_field, :remote_integer_field]

  schema "dummy_schema" do
    field :remote_string_field, :string
    field :remote_integer_field, :integer
    has_many :children, PrimarySchema
  end

  @doc false
  def changeset(schema, attrs) do
    schema
    |> cast(attrs, @required_fields)
    |> validate_required(@required_fields)
  end
end
# Boot the ExUnit runner so tests using the schemas above can execute.
ExUnit.start()
| 23.219512 | 71 | 0.746849 |
ff0da9ce63796ec9e4fd0741de468c24ff3bbed4 | 313 | exs | Elixir | test/pg/ecto_migration.exs | KamilZielinski/ecto_enum | 4cbcb359b63102edea3db4e984a33d4d09e3f0c0 | [
"MIT"
] | null | null | null | test/pg/ecto_migration.exs | KamilZielinski/ecto_enum | 4cbcb359b63102edea3db4e984a33d4d09e3f0c0 | [
"MIT"
] | null | null | null | test/pg/ecto_migration.exs | KamilZielinski/ecto_enum | 4cbcb359b63102edea3db4e984a33d4d09e3f0c0 | [
"MIT"
] | null | null | null | defmodule Ecto.Integration.Migration do
use Ecto.Migration
def change do
create table(:users) do
add(:status, :integer)
end
execute("CREATE TYPE status AS ENUM ('registered', 'active', 'inactive', 'archived')")
create table(:users_pg) do
add(:status, :status)
end
end
end
| 19.5625 | 90 | 0.654952 |
ff0dca4adff0584fa678c7e0bda47fe29f29142a | 2,238 | exs | Elixir | test/acceptance/html1/atx_headers_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html1/atx_headers_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | test/acceptance/html1/atx_headers_test.exs | ZeLarpMaster/earmark | 35c9661d6647059e507c0278347e21d92351c417 | [
"Apache-1.1"
] | null | null | null | defmodule Acceptance.Html1.AtxHeadersTest do
use ExUnit.Case, async: true
import Support.Html1Helpers
@moduletag :html1
describe "ATX headers" do
test "from one to six" do
markdown = "# foo\n## foo\n### foo\n#### foo\n##### foo\n###### foo\n"
html = Enum.map(1..6, fn x -> ["<h#{x}>\n", " foo\n", "</h#{x}>\n"] end) |> Enum.join()
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "seven? kidding, right?" do
markdown = "####### foo\n"
html = "<p>\n ####### foo\n</p>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "sticky (better than to have no glue)" do
markdown = "#5 bolt\n\n#foobar\n"
html = "<p>\n #5 bolt\n</p>\n<p>\n #foobar\n</p>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "close escape" do
markdown = "\\## foo\n"
html = "<p>\n ## foo\n</p>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "position is so important" do
markdown = "# foo *bar* \\*baz\\*\n"
html = "<h1>\n foo \n <em>\n bar\n </em>\n *baz*\n</h1>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "spacy" do
markdown = "# foo \n"
html = "<h1>\n foo\n</h1>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "code comes first" do
markdown = " # foo\nnext"
html = "<pre><code># foo</code></pre>\n<p>\n next\n</p>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "some prefer to close their headers" do
markdown = "# foo#\n"
html = "<h1>\n foo\n</h1>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
test "yes, they do (prefer closing their header)" do
markdown = "### foo ### "
html = "<h3>\n foo ###\n</h3>\n"
messages = []
assert to_html1(markdown) == {:ok, html, messages}
end
end
end
# SPDX-License-Identifier: Apache-2.0
| 26.329412 | 98 | 0.508043 |
ff0dcc9dc8c1d90557eb7c020e48b34e112aea63 | 876 | ex | Elixir | clients/games/lib/google_api/games/v1/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Games.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Games.V1.
  """

  # Revision date of the Google discovery document this client was
  # generated from (see the auto-generated header comment above).
  @discovery_revision "20210304"

  @doc """
  Returns the discovery-document revision this client was generated from.
  """
  def discovery_revision(), do: @discovery_revision
end
| 32.444444 | 74 | 0.756849 |
ff0dcfff7619daa0af6021de48b593c8d257f5cd | 13,246 | ex | Elixir | lib/date_time_parser.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | [
"MIT"
] | null | null | null | lib/date_time_parser.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | [
"MIT"
] | null | null | null | lib/date_time_parser.ex | tmr08c/date_time_parser | 42a93ff734bc532f86ca3a0bf90b662ce3bfade8 | [
"MIT"
] | null | null | null | defmodule DateTimeParser do
@moduledoc """
DateTimeParser is a tokenizer for strings that attempts to parse into a `DateTime`,
`NaiveDateTime` if timezone is not determined, `Date`, or `Time`.
The biggest ambiguity between datetime formats is whether it's `ymd` (year month day), `mdy`
(month day year), or `dmy` (day month year); this is resolved by checking if there are slashes or
dashes. If slashes, then it will try `dmy` first. All other cases will use the international
format `ymd`. Sometimes, if the conditions are right, it can even parse `dmy` with dashes if the
month is a vocal month (eg, `"Jan"`).
If the string is 10-11 digits with optional precision, then we'll try to parse it as a Unix Epoch
timestamp.
If the string is 1-5 digits with optional precision, then we'll try to parse it as a Serial
timestamp (spreadsheet time) treating 1899-12-31 as 1. This will cause Excel-produced dates from
1900-01-01 until 1900-03-01 to be incorrect, as they really are.
|digits|parser|range|notes|
|---|----|---|---|
|1-5|Serial|low = `1900-01-01`, high = `2173-10-15`. Negative numbers go to `1626-03-17`|Floats indicate time. Integers do not.|
|6-9|Tokenizer|any|This allows for "20190429" to be parsed as `2019-04-29`|
|10-11|Epoch|low = `1976-03-03T09:46:40`, high = `5138-11-16 09:46:39`|If padded with 0s, then it can capture entire range. Negative numbers not yet supported|
## Examples
```elixir
iex> DateTimeParser.parse("19 September 2018 08:15:22 AM")
{:ok, ~N[2018-09-19 08:15:22]}
iex> DateTimeParser.parse_datetime("19 September 2018 08:15:22 AM")
{:ok, ~N[2018-09-19 08:15:22]}
iex> DateTimeParser.parse_datetime("2034-01-13", assume_time: true)
{:ok, ~N[2034-01-13 00:00:00]}
iex> DateTimeParser.parse_datetime("2034-01-13", assume_time: ~T[06:00:00])
{:ok, ~N[2034-01-13 06:00:00]}
iex> DateTimeParser.parse("invalid date 10:30pm")
{:ok, ~T[22:30:00]}
iex> DateTimeParser.parse("2019-03-11T99:99:99")
{:ok, ~D[2019-03-11]}
iex> DateTimeParser.parse("2019-03-11T10:30:00pm UNK")
{:ok, ~N[2019-03-11T22:30:00]}
iex> DateTimeParser.parse("2019-03-11T22:30:00.234+00:00")
{:ok, DateTime.from_naive!(~N[2019-03-11T22:30:00.234Z], "Etc/UTC")}
iex> DateTimeParser.parse_date("2034-01-13")
{:ok, ~D[2034-01-13]}
iex> DateTimeParser.parse_date("01/01/2017")
{:ok, ~D[2017-01-01]}
iex> DateTimeParser.parse_datetime("1564154204")
{:ok, DateTime.from_naive!(~N[2019-07-26T15:16:44Z], "Etc/UTC")}
iex> DateTimeParser.parse_datetime("41261.6013888889")
{:ok, ~N[2012-12-18T14:26:00]}
iex> DateTimeParser.parse_date("44262")
{:ok, ~D[2021-03-07]}
# This is a serial number date, commonly found in spreadsheets, eg: `=VALUE("03/07/2021")`
iex> DateTimeParser.parse_datetime("1/1/18 3:24 PM")
{:ok, ~N[2018-01-01T15:24:00]}
iex> DateTimeParser.parse_datetime("1/1/18 3:24 PM", assume_utc: true)
{:ok, DateTime.from_naive!(~N[2018-01-01T15:24:00Z], "Etc/UTC")}
# or ~U[2018-01-01T15:24:00Z] in Elixir 1.9.0+
iex> DateTimeParser.parse_datetime(~s|"Mar 28, 2018 7:39:53 AM PDT"|, to_utc: true)
{:ok, DateTime.from_naive!(~N[2018-03-28T14:39:53Z], "Etc/UTC")}
iex> {:ok, datetime} = DateTimeParser.parse_datetime(~s|"Mar 1, 2018 7:39:53 AM PST"|)
iex> datetime
#DateTime<2018-03-01 07:39:53-08:00 PST PST8PDT>
iex> DateTimeParser.parse_datetime(~s|"Mar 1, 2018 7:39:53 AM PST"|, to_utc: true)
{:ok, DateTime.from_naive!(~N[2018-03-01T15:39:53Z], "Etc/UTC")}
iex> {:ok, datetime} = DateTimeParser.parse_datetime(~s|"Mar 28, 2018 7:39:53 AM PDT"|)
iex> datetime
#DateTime<2018-03-28 07:39:53-07:00 PDT PST8PDT>
iex> DateTimeParser.parse_time("10:13pm")
{:ok, ~T[22:13:00]}
iex> DateTimeParser.parse_time("10:13:34")
{:ok, ~T[10:13:34]}
iex> DateTimeParser.parse_time("18:14:21.145851000000Z")
{:ok, ~T[18:14:21.145851]}
iex> DateTimeParser.parse_datetime(nil)
{:error, "Could not parse nil"}
```
"""
  import DateTimeParser.Formatters
  alias DateTimeParser.{Epoch, Serial}

  # Whole string is 10-11 digits with an optional fractional part: Unix epoch.
  @epoch_regex ~r|\A(?<seconds>\d{10,11})(?:\.(?<subseconds>\d{1,10}))?\z|
  # Whole string is up to 5 digits, optionally negative and/or fractional:
  # spreadsheet serial number.
  @serial_regex ~r|\A-?\d{1,5}(?:\.\d{1,10})?\z|
  # Captures an hh:mm[:ss] fragment plus anything that follows it.
  @time_regex ~r|(?<time>\d{1,2}:\d{2}(?::\d{2})?(?:.*)?)|

  # Option tuple shapes accepted by the parse_* functions.
  @type assume_date :: {:assume_date, boolean() | Date.t()}
  @type assume_time :: {:assume_time, boolean() | Time.t()}
  @type assume_utc :: {:assume_utc, boolean()}
  @type to_utc :: {:to_utc, boolean()}
@typedoc """
Options applicable for `parse_datetime/2`
* `:assume_utc` Default `false`.
Only applicable for strings where parsing could not determine a timezone. Instead of returning a
NaiveDateTime, this option will assume them to be in UTC timezone, and therefore return a
DateTime. If the timezone is determined, then it will continue to be returned in the original
timezone. See `to_utc` option to also convert it to UTC.
* `:to_utc` Default `false`.
If there's a timezone detected in the string, then attempt to convert to UTC timezone. If you
know that your timestamps are in the future and are going to store it for later use, it may be
better to convert to UTC and keep the original timestamp since government organizations may change
timezone rules before the timestamp elapses, therefore making the UTC timestamp wrong or invalid.
[Check out the guide on future timestamps](./future-utc-datetime.html).
* `:assume_time` Default `false`.
If a time cannot be determined, then it will not be assumed by default. If you supply `true`, then
`~T[00:00:00]` will be assumed. You can also supply your own time, and the found tokens will be
merged with it.
"""
@type parse_datetime_options :: [assume_utc() | to_utc() | assume_time()]
@typedoc """
Options applicable for `parse_date/2`
* `:assume_date` Default `false`.
If a date cannot be fully determined, then it will not be assumed by default. If you supply
`true`, then `Date.utc_today()` will be assumed. You can also supply your own date, and the found
tokens will be merged with it.
"""
@type parse_date_options :: [assume_date()]
@typedoc """
Options for `parse/2`. Combination of `t:parse_date_options/0` and `t:parse_datetime_options/0`.
"""
@type parse_options :: parse_datetime_options() | parse_date_options()
@doc """
Parse a `%DateTime{}`, `%NaiveDateTime{}`, `%Date{}`, or `%Time{}` from a string.
Accepts `t:parse_options/0`
"""
@spec parse(String.t() | nil, parse_options()) ::
{:ok, DateTime.t() | NaiveDateTime.t() | Date.t() | Time.t()} | {:error, String.t()}
def parse(string, opts \\ []) do
with {:error, _} <- parse_datetime(string, opts),
{:error, _} <- parse_date(string, opts) do
parse_time(string)
end
end
  @doc """
  Parse a `%DateTime{}` or `%NaiveDateTime{}` from a string. Accepts options
  `t:parse_datetime_options/0`
  """
  @spec parse_datetime(String.t() | nil, parse_datetime_options()) ::
          {:ok, DateTime.t() | NaiveDateTime.t()} | {:error, String.t()}
  def parse_datetime(string, opts \\ [])

  def parse_datetime(string, opts) when is_binary(string) do
    # Pipeline: normalize -> tokenize -> build a naive datetime -> attach a
    # timezone when the tokens carry one -> reject impossible days-of-month ->
    # apply the :assume_utc / :to_utc options. Any failure along the way
    # falls through to a single generic parse error.
    with cleaned_string <- clean(string),
         {:ok, tokens, _, _, _, _} <- do_datetime_parse(cleaned_string),
         {:ok, naive_datetime} <- to_naive_datetime(tokens, opts),
         datetime <- to_datetime(naive_datetime, tokens),
         {:ok, datetime} <- validate_day(datetime),
         datetime <- maybe_convert_to_utc(datetime, opts) do
      {:ok, datetime}
    else
      _ ->
        {:error, "Could not parse #{string}"}
    end
  end

  def parse_datetime(nil, _opts), do: {:error, "Could not parse nil"}
  def parse_datetime(value, _opts), do: {:error, "Could not parse #{value}"}
defp do_datetime_parse(string) do
cond do
String.contains?(string, "/") ->
DateTimeParser.DateTime.parse_us(string)
epoch_regex_capture = Regex.named_captures(@epoch_regex, string) ->
Epoch.parse(epoch_regex_capture)
Regex.match?(@serial_regex, string) ->
Serial.parse(string)
true ->
DateTimeParser.DateTime.parse(string)
end
end
@doc """
Parse `%Time{}` from a string.
"""
@spec parse_time(String.t() | nil) :: {:ok, Time.t()} | {:error, String.t()}
def parse_time(string) when is_binary(string) do
case string |> clean() |> do_time_parse() do
{:ok, tokens, _, _, _, _} ->
to_time(tokens)
_ ->
{:error, "Could not parse #{string}"}
end
end
def parse_time(nil), do: {:error, "Could not parse nil"}
def parse_time(value), do: {:error, "Could not parse #{value}"}
defp do_time_parse(string) do
cond do
epoch_regex_capture = Regex.named_captures(@epoch_regex, string) ->
Epoch.parse(epoch_regex_capture)
Regex.match?(@serial_regex, string) ->
Serial.parse(string)
true ->
case Regex.named_captures(@time_regex, string) do
%{"time" => time} -> DateTimeParser.Time.parse(time)
_ -> DateTimeParser.Time.parse(string)
end
end
end
  @doc """
  Parse `%Date{}` from a string. Accepts options `t:parse_date_options/0`
  """
  @spec parse_date(String.t() | nil, parse_date_options()) ::
          {:ok, Date.t()} | {:error, String.t()}
  def parse_date(string, opts \\ [])

  def parse_date(string, opts) when is_binary(string) do
    # Normalize -> tokenize -> build the date (merging :assume_date when
    # given) -> reject impossible day-of-month combinations.
    with cleaned_string <- clean(string),
         {:ok, tokens, _, _, _, _} <- do_parse_date(cleaned_string),
         {:ok, date} <- to_date(tokens, opts),
         {:ok, _} <- validate_day(date) do
      {:ok, date}
    else
      _ ->
        {:error, "Could not parse #{string}"}
    end
  end

  def parse_date(nil, _opts), do: {:error, "Could not parse nil"}
  def parse_date(value, _opts), do: {:error, "Could not parse #{value}"}
defp do_parse_date(string) do
cond do
String.contains?(string, "/") ->
DateTimeParser.Date.parse_us(string)
epoch_regex_capture = Regex.named_captures(@epoch_regex, string) ->
Epoch.parse(epoch_regex_capture)
Regex.match?(@serial_regex, string) ->
Serial.parse(string)
true ->
DateTimeParser.Date.parse(string)
end
end
  # Extracts a `Time` from parser tokens.
  defp to_time(tokens) do
    cond do
      # Unix epoch: build the UTC datetime, then keep only its time part.
      tokens[:unix_epoch] ->
        with {:ok, datetime} <- Epoch.from_tokens(tokens) do
          {:ok, DateTime.to_time(datetime)}
        end

      # Spreadsheet serial: only a NaiveDateTime result carries a time
      # component; anything else is rejected.
      # NOTE(review): this branch returns a bare :error while the epoch branch
      # propagates Epoch.from_tokens/1 failures as-is — confirm callers
      # tolerate both shapes.
      tokens[:serial] ->
        case Serial.from_tokens(tokens, []) do
          {:ok, %NaiveDateTime{} = datetime} ->
            {:ok, NaiveDateTime.to_time(datetime)}

          _ ->
            :error
        end

      # Plain tokenized time.
      true ->
        DateTimeParser.Time.from_tokens(tokens)
    end
  end
  # Extracts a `Date` from parser tokens.
  defp to_date(tokens, opts) do
    cond do
      # Spreadsheet serial: Serial.from_tokens/2 may return either a bare
      # %Date{} or a %NaiveDateTime{} (whose date part is taken).
      tokens[:serial] ->
        case Serial.from_tokens(tokens, opts) do
          {:ok, %NaiveDateTime{} = naive_datetime} ->
            {:ok, NaiveDateTime.to_date(naive_datetime)}

          {:ok, %Date{} = date} ->
            {:ok, date}

          _ ->
            :error
        end

      # Unix epoch: build the UTC datetime, then keep only its date part.
      tokens[:unix_epoch] ->
        with {:ok, datetime} <- Epoch.from_tokens(tokens),
             %Date{} = date <- DateTime.to_date(datetime) do
          {:ok, date}
        end

      # Tokenized date; honours the :assume_date option downstream.
      true ->
        DateTimeParser.Date.from_tokens(tokens, opts)
    end
  end
defp to_naive_datetime(tokens, opts) do
cond do
tokens[:serial] ->
case Serial.from_tokens(tokens, opts) do
{:ok, %NaiveDateTime{} = naive_datetime} ->
{:ok, naive_datetime}
_ ->
:error
end
tokens[:unix_epoch] ->
Epoch.from_tokens(tokens)
true ->
DateTimeParser.DateTime.from_tokens(tokens, opts)
end
end
  # A DateTime already carries its zone (e.g. from epoch parsing) — pass through.
  defp to_datetime(%DateTime{} = datetime, _tokens), do: datetime

  # Otherwise try to upgrade the naive datetime using any timezone tokens found
  # in the input; may still yield a NaiveDateTime when no zone is known.
  defp to_datetime(%NaiveDateTime{} = naive_datetime, tokens) do
    DateTimeParser.DateTime.from_naive_datetime_and_tokens(naive_datetime, tokens)
  end
  # Rejects values whose day-of-month cannot exist in the given month.
  # Matches any map/struct with :day and :month (Date, NaiveDateTime, DateTime).

  # 31-day months.
  defp validate_day(%{day: day, month: month} = date)
       when month in [1, 3, 5, 7, 8, 10, 12] and day in 1..31,
       do: {:ok, date}

  # 30-day months.
  defp validate_day(%{day: day, month: month} = date)
       when month in [4, 6, 9, 11] and day in 1..30,
       do: {:ok, date}

  # February 1-28 is always valid.
  defp validate_day(%{day: day, month: 2} = date)
       when day in 1..28,
       do: {:ok, date}

  # February 29 requires a leap year (and therefore a :year key).
  defp validate_day(%{day: 29, month: 2, year: year} = date) do
    if Timex.is_leap?(year),
      do: {:ok, date},
      else: :error
  end

  defp validate_day(_), do: :error
defp maybe_convert_to_utc(%NaiveDateTime{} = naive_datetime, opts) do
if Keyword.get(opts, :assume_utc, false) do
naive_datetime
|> DateTime.from_naive!("Etc/UTC")
|> maybe_convert_to_utc(opts)
else
naive_datetime
end
end
defp maybe_convert_to_utc(%DateTime{zone_abbr: "Etc/UTC"} = datetime, _opts), do: datetime
defp maybe_convert_to_utc(%DateTime{} = datetime, opts) do
if Keyword.get(opts, :to_utc, false) do
# empty TimezoneInfo defaults to UTC. Doing this to avoid Dialyzer errors
# since :utc is not in the typespec
Timex.Timezone.convert(datetime, %Timex.TimezoneInfo{})
else
datetime
end
end
end
| 33.790816 | 161 | 0.638457 |
ff0ddc4e0d657dd04119ed108c21969cb43d7a5c | 1,929 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/subscription_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/subscription_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/subscription_content_details.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.SubscriptionContentDetails do
  @moduledoc """
  Details about the content to which a subscription refers.

  ## Attributes

  *   `activityType` (*type:* `String.t`, *default:* `nil`) - The type of activity this subscription is for (only uploads, everything).
  *   `newItemCount` (*type:* `integer()`, *default:* `nil`) - The number of new items in the subscription since its content was last read.
  *   `totalItemCount` (*type:* `integer()`, *default:* `nil`) - The approximate number of items that the subscription points to.
  """

  # Generated model: `use` injects the struct definition plus the field/1-2
  # declaration macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :activityType => String.t(),
          :newItemCount => integer(),
          :totalItemCount => integer()
        }

  # Each field/1 call registers a struct key for JSON (de)serialization.
  field(:activityType)
  field(:newItemCount)
  field(:totalItemCount)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.SubscriptionContentDetails do
  # JSON decoding is handled by the model's generated decode/2.
  defdelegate decode(value, options), to: GoogleApi.YouTube.V3.Model.SubscriptionContentDetails
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.SubscriptionContentDetails do
  # Encoding is shared across all generated models via the Gax model base.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 36.396226 | 139 | 0.729393 |
ff0de793f10a99cc06717e89bf62e2fa62763dc8 | 2,559 | ex | Elixir | clients/game_services/lib/google_api/game_services/v1beta/model/game_server_cluster_connection_info.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/game_services/lib/google_api/game_services/v1beta/model/game_server_cluster_connection_info.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/game_services/lib/google_api/game_services/v1beta/model/game_server_cluster_connection_info.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GameServices.V1beta.Model.GameServerClusterConnectionInfo do
  @moduledoc """
  The game server cluster connection information.

  ## Attributes

  *   `gkeClusterReference` (*type:* `GoogleApi.GameServices.V1beta.Model.GkeClusterReference.t`, *default:* `nil`) - Reference to the GKE cluster where the game servers are installed.
  *   `gkeHubClusterReference` (*type:* `GoogleApi.GameServices.V1beta.Model.GkeHubClusterReference.t`, *default:* `nil`) - Reference to a Kubernetes cluster registered through GKE Hub. See https://cloud.google.com/anthos/multicluster-management/ for more information about registering Kubernetes clusters.
  *   `namespace` (*type:* `String.t`, *default:* `nil`) - Namespace designated on the game server cluster where the Agones game server instances will be created. Existence of the namespace will be validated during creation.
  """

  # Generated model: `use` injects the struct definition plus the field/1-2
  # declaration macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :gkeClusterReference =>
            GoogleApi.GameServices.V1beta.Model.GkeClusterReference.t() | nil,
          :gkeHubClusterReference =>
            GoogleApi.GameServices.V1beta.Model.GkeHubClusterReference.t() | nil,
          :namespace => String.t() | nil
        }

  # The `as:` option ties each nested reference to its own generated model.
  field(:gkeClusterReference, as: GoogleApi.GameServices.V1beta.Model.GkeClusterReference)
  field(:gkeHubClusterReference, as: GoogleApi.GameServices.V1beta.Model.GkeHubClusterReference)
  field(:namespace)
end
defimpl Poison.Decoder, for: GoogleApi.GameServices.V1beta.Model.GameServerClusterConnectionInfo do
  # JSON decoding is handled by the model's generated decode/2.
  defdelegate decode(value, options),
    to: GoogleApi.GameServices.V1beta.Model.GameServerClusterConnectionInfo
end
defimpl Poison.Encoder, for: GoogleApi.GameServices.V1beta.Model.GameServerClusterConnectionInfo do
  # Encoding is shared across all generated models via the Gax model base.
  defdelegate encode(value, options), to: GoogleApi.Gax.ModelBase
end
| 46.527273 | 306 | 0.762798 |
ff0e2fc5a3b8ca1e3e64c1974a375f3cb2f6f36a | 1,588 | exs | Elixir | test/fixtures/example_app/config/test.schema.exs | amco/exrm | 2600bffb92c6e129dd513978578992fc64d9ce08 | [
"MIT"
] | 986 | 2015-01-01T13:20:41.000Z | 2021-09-03T02:18:55.000Z | test/fixtures/example_app/config/test.schema.exs | amco/exrm | 2600bffb92c6e129dd513978578992fc64d9ce08 | [
"MIT"
] | 332 | 2015-01-02T09:17:25.000Z | 2018-04-06T22:32:25.000Z | test/fixtures/example_app/config/test.schema.exs | amco/exrm | 2600bffb92c6e129dd513978578992fc64d9ce08 | [
"MIT"
] | 132 | 2015-01-05T06:11:21.000Z | 2019-10-31T13:31:12.000Z | [
  # Modules this schema needs available when it is evaluated.
  import: [
    :fake_project
  ],
  # Maps .conf keys onto application-config paths, with type and default metadata.
  mappings: [
    "test.foo": [
      doc: "Documentation for test.foo goes here.",
      to: "test.foo",
      datatype: :binary,
      default: "bar"
    ],
    "test.env": [
      doc: "The current execution environment",
      to: "test.env",
      datatype: :atom,
      default: :dev
    ],
    "test.debug.level": [
      doc: """
      Set the appropriate tracing options.
      Valid options are:
      - active: tracing is enabled
      - active-debug: tracing is enabled, with debugging
      - passive: tracing must be manually invoked
      - off: tracing is disabled
      Defaults to off
      """,
      to: "test.debug_level",
      datatype: [
        enum: [
          :active,
          :"active-debug",
          :passive,
          :off
        ]
      ],
      default: :off
    ],
    "test.some_val": [
      doc: """
      Just a some val
      """,
      to: "test.some_val",
      datatype: :integer,
      default: 10
    ]
  ],
  # Post-processing applied to the parsed configuration values.
  transforms: [
    "test.debug_level": fn conf ->
      case Conform.Conf.get(conf, "test.debug_level") do
        [{_, :active}] ->
          {:on, []}
        [{_, :"active-debug"}] ->
          {:on, [:debug]}
        [{_, :passive}] ->
          {:on, [:passive]}
        [{_, :off}] ->
          {:off, []}
        # NOTE(review): empty clause body — yields no expression when the key
        # is absent; modern Elixir rejects this at compile time. Confirm
        # whether nil or {:off, []} was intended here.
        [] ->
      end
    end,
    "test.some_val": fn conf ->
      case Conform.Conf.get(conf, "test.some_val") do
        [{_, x}] when is_integer(x) -> FakeProject.inc_some_val(x)
        [{_, x}] -> x
      end
    end
  ]
]
| 22.366197 | 66 | 0.457809 |
ff0e3773554ce699a2c82cdabbbfc2b2fc2ea5d8 | 1,651 | ex | Elixir | lib/pixel_font/table_source/otf_layout/sequence_context_3.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 17 | 2020-09-14T15:25:38.000Z | 2022-03-05T17:14:24.000Z | lib/pixel_font/table_source/otf_layout/sequence_context_3.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | 1 | 2021-08-19T05:05:37.000Z | 2021-08-19T05:05:37.000Z | lib/pixel_font/table_source/otf_layout/sequence_context_3.ex | Dalgona/pixel_font | 6a65bf85e5228296eb29fddbfdd690565767ff76 | [
"MIT"
] | null | null | null | defmodule PixelFont.TableSource.OTFLayout.SequenceContext3 do
  alias PixelFont.TableSource.OTFLayout.GlyphCoverage
  alias PixelFont.TableSource.OTFLayout.Lookup
  alias PixelFont.Util

  # input: one glyph-coverage set per position of the input sequence.
  # lookup_records: {sequence_position, lookup_id} pairs to apply on a match.
  defstruct input: [], lookup_records: []

  @type t :: %__MODULE__{
          input: [GlyphCoverage.t()],
          lookup_records: [{integer(), Lookup.id()}]
        }

  @doc """
  Compiles this subtable into an OpenType Sequence Context Format 3 binary.

  `opts[:lookup_indices]` must map lookup IDs to their numeric indices, since
  the binary format stores indices rather than names.
  """
  @spec compile(t(), keyword()) :: binary()
  def compile(%__MODULE__{} = subtable, opts) do
    lookup_indices = opts[:lookup_indices]
    lookup_record_count = length(subtable.lookup_records)
    # Header is format(2) + glyphCount(2) + seqLookupCount(2) = 6 bytes,
    # followed by glyphCount 2-byte coverage offsets and seqLookupCount
    # 4-byte lookup records — the coverage tables start right after.
    coverage_offset_base = 6 + length(subtable.input) * 2 + lookup_record_count * 4

    compiled_lookup_records =
      Enum.map(subtable.lookup_records, fn {glyph_pos, lookup_id} ->
        <<glyph_pos::16, lookup_indices[lookup_id]::16>>
      end)

    # Compile each coverage table and the per-table offsets relative to the
    # subtable start (offsets are measured from coverage_offset_base upward).
    {_, coverage_offsets, compiled_coverages} =
      Util.offsetted_binaries(subtable.input, coverage_offset_base, &GlyphCoverage.compile/1)

    IO.iodata_to_binary([
      # format
      <<3::16>>,
      # glyphCount
      <<length(subtable.input)::16>>,
      # seqLookupCount
      <<lookup_record_count::16>>,
      # coverageOffsets[]
      coverage_offsets,
      # seqLookupRecords[]
      compiled_lookup_records,
      # Coverage tables
      compiled_coverages
    ])
  end
  # Sequence contexts share one binary layout between GPOS and GSUB.
  defimpl PixelFont.TableSource.GPOS.Subtable do
    alias PixelFont.TableSource.OTFLayout.SequenceContext3

    defdelegate compile(subtable, opts), to: SequenceContext3
  end
  # Same delegation for the substitution (GSUB) protocol.
  defimpl PixelFont.TableSource.GSUB.Subtable do
    alias PixelFont.TableSource.OTFLayout.SequenceContext3

    defdelegate compile(subtable, opts), to: SequenceContext3
  end
end
| 30.018182 | 93 | 0.701393 |
ff0e4c3c5433049c51dd2bd0ee9ba18fbf274b76 | 1,975 | ex | Elixir | lib/glimesh/graphql/resolvers/chat_resolver.ex | moltenllama/glimesh.tv | 8bf8e2bfc54390b15014689ab5bca330ecf6d74c | [
"MIT"
] | null | null | null | lib/glimesh/graphql/resolvers/chat_resolver.ex | moltenllama/glimesh.tv | 8bf8e2bfc54390b15014689ab5bca330ecf6d74c | [
"MIT"
] | null | null | null | lib/glimesh/graphql/resolvers/chat_resolver.ex | moltenllama/glimesh.tv | 8bf8e2bfc54390b15014689ab5bca330ecf6d74c | [
"MIT"
] | null | null | null | defmodule Glimesh.Resolvers.ChatResolver do
@moduledoc false
use Appsignal.Instrumentation.Decorators
alias Glimesh.Chat
@decorate transaction_event()
def create_chat_message(
_parent,
%{channel_id: channel_id, message: message_obj},
%{context: %{user_access: ua}}
) do
with :ok <- Bodyguard.permit(Glimesh.Resolvers.Scopes, :chat, ua) do
channel = Glimesh.ChannelLookups.get_channel!(channel_id)
Chat.create_chat_message(ua.user, channel, message_obj)
end
end
  # Thin GraphQL mutation entry points: each forwards to the scope-guarded
  # perform_channel_action/4 with a fixed moderation action tag.
  @decorate transaction_event()
  def short_timeout_user(parent, params, context) do
    perform_channel_action(parent, params, context, :short_timeout)
  end

  @decorate transaction_event()
  def long_timeout_user(parent, params, context) do
    perform_channel_action(parent, params, context, :long_timeout)
  end

  @decorate transaction_event()
  def ban_user(parent, params, context) do
    perform_channel_action(parent, params, context, :ban)
  end

  @decorate transaction_event()
  def unban_user(parent, params, context) do
    perform_channel_action(parent, params, context, :unban)
  end
  @doc """
  Send an action to the Chat context to figure out permissions on, but first make sure the user_access is allowed access to the chat.
  """
  def perform_channel_action(
        _parent,
        %{channel_id: channel_id, user_id: user_id},
        %{context: %{user_access: ua}},
        action
      ) do
    # Gate on the :chat scope first; a denial is returned as-is via `with`.
    with :ok <- Bodyguard.permit(Glimesh.Resolvers.Scopes, :chat, ua) do
      channel = Glimesh.ChannelLookups.get_channel!(channel_id)
      moderator = ua.user
      user = Glimesh.Accounts.get_user!(user_id)

      # NOTE(review): per-channel moderator permissions are presumably enforced
      # inside each Glimesh.Chat function — confirm in that context.
      case action do
        :short_timeout -> Chat.short_timeout_user(moderator, channel, user)
        :long_timeout -> Chat.long_timeout_user(moderator, channel, user)
        :ban -> Chat.ban_user(moderator, channel, user)
        :unban -> Chat.unban_user(moderator, channel, user)
      end
    end
  end
end
| 30.859375 | 133 | 0.703291 |
ff0e6e83e515432e7510cdc4dc5e21e139849d1a | 10,016 | exs | Elixir | test/ueberauth_adfs/adfs_test.exs | Kuret/ueberauth_adfs | 3082f07b09dc223b8967abe30ad492fe450ead67 | [
"MIT"
] | null | null | null | test/ueberauth_adfs/adfs_test.exs | Kuret/ueberauth_adfs | 3082f07b09dc223b8967abe30ad492fe450ead67 | [
"MIT"
] | null | null | null | test/ueberauth_adfs/adfs_test.exs | Kuret/ueberauth_adfs | 3082f07b09dc223b8967abe30ad492fe450ead67 | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.ADFSTest do
use ExUnit.Case
use Ueberauth.Strategy
import Mock
alias Ueberauth.Strategy.ADFS
  # Canned application environment returned by the mocked Application.get_env/2.
  @env_values adfs_url: "https://example.com",
              adfs_metadata_url: "https://example.com/metadata.xml",
              client_id: "example_client",
              resource_identifier: "example_resource"

  # Minimal federation-metadata XML containing one signing certificate.
  @mock_metadata {:ok,
                  %HTTPoison.Response{
                    body:
                      "<EntityDescriptor><ds:Signature><KeyInfo>" <>
                        "<X509Data><X509Certificate>1234</X509Certificate></X509Data>" <>
                        "</KeyInfo></ds:Signature></EntityDescriptor>"
                  }}

  # Claims the mocked Joken.verify/1 yields for a "valid" token.
  @user_claim %Joken.Token{
    claims: %{
      "email" => "user@test.com",
      "given_name" => "John",
      "family_name" => "Doe",
      "winaccountname" => "john1"
    }
  }
describe "ADFS Strategy" do
    # Happy-path mocks for every test in this describe: OAuth token exchange,
    # app config, metadata HTTP fetch, JWT verification, PEM parsing, and the
    # Ueberauth helper plumbing. Individual tests re-mock to force failures.
    setup_with_mocks [
      {ADFS.OAuth, [:passthrough],
       [get_token: fn code, _ -> {:ok, %{token: %{access_token: code}}} end]},
      {Application, [:passthrough], [get_env: fn _, _ -> @env_values end]},
      {HTTPoison, [:passthrough], [get: fn _, _, _ -> @mock_metadata end]},
      {Joken, [:passthrough],
       [token: fn _ -> nil end, with_signer: fn _, _ -> nil end, verify: fn _ -> @user_claim end]},
      {JOSE.JWK, [:passthrough], [from_pem: fn _ -> %{foo: :bar} end]},
      {Ueberauth.Strategy.Helpers, [:passthrough],
       [
         callback_url: fn _ -> "https://test.com" end,
         options: fn _ -> [uid_field: "email"] end,
         redirect!: fn _conn, auth_url -> auth_url end,
         set_errors!: fn _conn, errors -> errors end
       ]}
    ] do
      :ok
    end
    # The authorize redirect should target the configured ADFS endpoint.
    test "Handles the ADFS request" do
      request = ADFS.handle_request!(%Plug.Conn{params: %{}})

      assert request =~ "#{@env_values[:adfs_url]}/adfs/oauth2/authorize"
    end

    # With no configuration the strategy bails out to the root path.
    test "Redirects ADFS request to index when missing config" do
      with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
        assert ADFS.handle_request!(nil) == "/"
      end
    end

    # WS-Federation sign-out URL is derived from the configured ADFS URL.
    test "Handles the logout request" do
      assert ADFS.logout(nil, nil) =~ "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0"
    end

    # Without config, logout surfaces a Ueberauth failure instead of a URL.
    test "Gives an error upon logout request with missing config" do
      with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
        assert ADFS.logout(nil, nil) == [
                 %Ueberauth.Failure.Error{
                   message: "Failed to logout, please close your browser",
                   message_key: "Logout Failed"
                 }
               ]
      end
    end
    # Successful callback: the verified JWT claims land in conn.private.
    test "Handle callback from ADFS provider, set claims user from JWT" do
      conn = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

      assert conn.private.adfs_user == @user_claim.claims
    end

    # A JWT that fails verification yields a token/unauthorized failure.
    test "Handle callback from ADFS provider when JWT is unauthorized" do
      with_mock Joken,
                [:passthrough],
                token: fn _ -> nil end,
                with_signer: fn _, _ -> nil end,
                verify: fn _ -> nil end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error ==
                 %Ueberauth.Failure.Error{
                   message: "unauthorized",
                   message_key: "token"
                 }
      end
    end

    # An empty metadata document cannot be parsed for a certificate.
    test "Handle callback from ADFS provider when metadata is malformed" do
      with_mock HTTPoison,
                [:passthrough],
                get: fn _, _, _ -> {:ok, %HTTPoison.Response{body: ""}} end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error == %Ueberauth.Failure.Error{message: "malformed", message_key: "metadata"}
      end
    end

    # Well-formed metadata without an X509Certificate element.
    test "Handle callback from ADFS provider when certificate is not found in metadata" do
      with_mock HTTPoison, [:passthrough],
        get: fn _, _, _ ->
          {:ok, %HTTPoison.Response{body: "<EntityDescriptor></EntityDescriptor>"}}
        end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error == %Ueberauth.Failure.Error{message: "not_found", message_key: "certificate"}
      end
    end

    # The metadata endpoint itself is unreachable.
    test "Handle callback from ADFS provider when metadata url is not found" do
      with_mock HTTPoison,
                [:passthrough],
                get: fn _, _, _ -> {:error, %HTTPoison.Error{}} end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error == %Ueberauth.Failure.Error{
                 message: "not_found",
                 message_key: "metadata_url"
               }
      end
    end
    # Transport-level failure during the code-for-token exchange.
    test "Handle callback from ADFS provider with token error" do
      with_mock ADFS.OAuth,
                [:passthrough],
                get_token: fn _, _ -> {:error, %{reason: "token_error"}} end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error == %Ueberauth.Failure.Error{
                 message: "token_error",
                 message_key: "Authentication Error"
               }
      end
    end

    # OAuth2-protocol error body: error_description becomes the message.
    test "Handle callback from ADFS provider with OAuth2 error" do
      with_mock ADFS.OAuth, [:passthrough],
        get_token: fn _, _ ->
          {:error, %OAuth2.Response{body: %{"error_description" => "oauth_error"}}}
        end do
        [error] = ADFS.handle_callback!(%Plug.Conn{params: %{"code" => "1234"}})

        assert error == %Ueberauth.Failure.Error{
                 message: "oauth_error",
                 message_key: "Authentication Error"
               }
      end
    end

    # Provider-reported error passed back via the callback query params.
    test "Handle callback from ADFS provider with error in the params" do
      [error] =
        ADFS.handle_callback!(%Plug.Conn{
          params: %{"error" => "param_error", "error_description" => "param_error_description"}
        })

      assert error == %Ueberauth.Failure.Error{
               message: "param_error_description",
               message_key: "param_error"
             }
    end

    # Callback without an authorization code at all.
    test "Handle callback from ADFS provider with missing code" do
      [error] = ADFS.handle_callback!(%Plug.Conn{})

      assert error == %Ueberauth.Failure.Error{
               message: "No code received",
               message_key: "missing_code"
             }
    end
    # handle_cleanup!/1 must wipe the strategy's private conn entries.
    test "Handles cleanup of the private vars in the conn" do
      conn =
        %Plug.Conn{params: %{"code" => "1234"}}
        |> ADFS.handle_callback!()
        |> ADFS.handle_cleanup!()

      assert conn.private.adfs_user == nil
      assert conn.private.adfs_token == nil
    end

    # uid/1 honours the mocked uid_field option ("email").
    test "Gets the uid field from the conn" do
      email =
        %Plug.Conn{params: %{"code" => "1234"}}
        |> ADFS.handle_callback!()
        |> ADFS.uid()

      assert email == "user@test.com"
    end

    test "Gets the token credentials from the conn" do
      token =
        %Plug.Conn{params: %{"code" => "1234"}}
        |> ADFS.handle_callback!()
        |> ADFS.credentials()

      assert token == %Ueberauth.Auth.Credentials{}
    end

    # info/1 composes name from the given/family name claims, nickname from
    # winaccountname, and email from the email claim.
    test "Gets the user info from the conn" do
      info =
        %Plug.Conn{params: %{"code" => "1234"}}
        |> ADFS.handle_callback!()
        |> ADFS.info()

      assert info == %Ueberauth.Auth.Info{
               name: "John Doe",
               nickname: "john1",
               email: "user@test.com"
             }
    end

    test "Gets the extra info from the conn" do
      extra =
        %Plug.Conn{params: %{"code" => "1234"}}
        |> ADFS.handle_callback!()
        |> ADFS.extra()

      assert %Ueberauth.Auth.Extra{raw_info: %{token: %Joken.Token{}, user: %{}}} = extra
    end
    # configured?/0 is true only when the full env keyword list is present.
    test "Returns the configured status when env is present" do
      assert ADFS.configured?() == true
    end

    test "Returns the configured status when env is not present" do
      with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
        assert ADFS.configured?() == false
      end
    end

    # A partial config (only adfs_url) still counts as unconfigured.
    test "Returns the configured status when env is missing values" do
      with_mock Application,
                [:passthrough],
                get_env: fn _, _ -> [adfs_url: "https://test.com"] end do
        assert ADFS.configured?() == false
      end
    end
end
describe "ADFS Oauth Client" do
    # Only the app-env lookup needs mocking for the OAuth client helpers.
    setup_with_mocks [{Application, [:passthrough], [get_env: fn _, _ -> @env_values end]}] do
      :ok
    end

    # Client endpoints are all derived from the configured adfs_url.
    test "Gets the client with the config properties" do
      client = ADFS.OAuth.client()

      assert client.client_id == @env_values[:client_id]
      assert client.authorize_url == "#{@env_values[:adfs_url]}/adfs/oauth2/authorize"
      assert client.token_url == "#{@env_values[:adfs_url]}/adfs/oauth2/token"
    end

    # Explicit options override the configured values.
    test "Gets the client with options" do
      client = ADFS.OAuth.client(client_id: "other_client")

      assert client.client_id == "other_client"
    end

    test "Doesn't get the client without config" do
      with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
        client = ADFS.OAuth.client()

        assert client == {nil, []}
      end
    end

    test "Get the authorize_url" do
      assert ADFS.OAuth.authorize_url!() ==
               "#{@env_values[:adfs_url]}/adfs/oauth2/authorize?client_id=example_client&redirect_uri=&response_type=code"
    end

    test "Gets the signout url" do
      assert ADFS.OAuth.signout_url() ==
               {:ok, "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0"}
    end

    # redirect_uri is appended as the WS-Fed wreply parameter.
    test "Gets the signout url with params" do
      assert ADFS.OAuth.signout_url(%{redirect_uri: "https://test.com"}) ==
               {:ok, "#{@env_values[:adfs_url]}/adfs/ls/?wa=wsignout1.0&wreply=https://test.com"}
    end

    test "Fails to get the signout url without config" do
      with_mock Application, [:passthrough], get_env: fn _, _ -> nil end do
        assert ADFS.OAuth.signout_url() == {:error, :failed_to_logout}
      end
    end
end
end
| 33.837838 | 122 | 0.576977 |
ff0e95008a654aaa91f2385b827b6d6c580be1e0 | 1,192 | ex | Elixir | lib/k8s/client/runner/async.ex | Overbryd/k8s | 7057601b1f191b47294133045c9e5b74bd9dd525 | [
"MIT"
] | null | null | null | lib/k8s/client/runner/async.ex | Overbryd/k8s | 7057601b1f191b47294133045c9e5b74bd9dd525 | [
"MIT"
] | null | null | null | lib/k8s/client/runner/async.ex | Overbryd/k8s | 7057601b1f191b47294133045c9e5b74bd9dd525 | [
"MIT"
] | null | null | null | defmodule K8s.Client.Runner.Async do
@moduledoc """
`K8s.Client` runner to process a batch of operations asynchronously.
"""
alias K8s.Client.Runner.Base
alias K8s.Operation
@doc """
Async run multiple operations. Operations will be returned in same order given.
Operations will not cease in event of failure.
## Example
Get a list of pods, then map each one to an individual `GET` operation:
```elixir
# Get the pods
operation = K8s.Client.list("v1", "Pod", namespace: :all)
{:ok, %{"items" => pods}} = K8s.Client.run(operation, "test-cluster")
# Map each one to an individual `GET` operation.
operations = Enum.map(pods, fn(%{"metadata" => %{"name" => name, "namespace" => ns}}) ->
K8s.Client.get("v1", "Pod", namespace: ns, name: name)
end)
# Get the results asynchronously
results = K8s.Client.Async.run(operations, "test-cluster")
```
"""
@spec run(list(Operation.t()), binary, keyword) :: list({:ok, struct} | {:error, struct})
def run(operations, cluster_name, opts \\ []) do
operations
|> Enum.map(&Task.async(fn -> Base.run(&1, cluster_name, opts) end))
|> Enum.map(&Task.await/1)
end
end
| 31.368421 | 92 | 0.645134 |
ff0eb69c30a8a6f4709fc45284fd5ca4246cf5eb | 484 | exs | Elixir | config/test.exs | CaseyKelly/PhoenixAuthAPI | d3795f9dbc94efbd3aa869a6c4960add1a68e05f | [
"Apache-2.0"
] | null | null | null | config/test.exs | CaseyKelly/PhoenixAuthAPI | d3795f9dbc94efbd3aa869a6c4960add1a68e05f | [
"Apache-2.0"
] | null | null | null | config/test.exs | CaseyKelly/PhoenixAuthAPI | d3795f9dbc94efbd3aa869a6c4960add1a68e05f | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :my_app, MyApp.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :my_app, MyApp.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "my_app_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 24.2 | 56 | 0.729339 |
ff0eb9fcef4ef32a8d64f4e4042732ff0d63e7cb | 405 | exs | Elixir | config/dogma.exs | craigp/elixir-infobip | 6f965319e9312ee03128cacc586fa505c917ea06 | [
"MIT"
] | 3 | 2016-07-26T22:33:31.000Z | 2020-01-14T23:31:42.000Z | config/dogma.exs | craigp/elixir-infobip | 6f965319e9312ee03128cacc586fa505c917ea06 | [
"MIT"
] | null | null | null | config/dogma.exs | craigp/elixir-infobip | 6f965319e9312ee03128cacc586fa505c917ea06 | [
"MIT"
] | 1 | 2017-05-15T22:05:43.000Z | 2017-05-15T22:05:43.000Z | use Mix.Config
alias Dogma.Rule
config :dogma,
# Select a set of rules as a base
rule_set: Dogma.RuleSet.All,
# # Pick paths not to lint
# exclude: [
# ~r(\Alib/vendor/),
# ],
# Override an existing rule configuration
override: [
%Rule.LineLength{enabled: false},
# %Rule.PipelineStart{enabled: false},
%Rule.InfixOperatorPadding{enabled: false},
%Rule.TrailingBlankLines{enabled: false}
]
| 18.409091 | 45 | 0.711111 |
ff0edeeca8fefa9be3c46408dba7337cd2ccbacb | 4,594 | ex | Elixir | lib/membrane/core/element/playback_buffer.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | lib/membrane/core/element/playback_buffer.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | lib/membrane/core/element/playback_buffer.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Core.Element.PlaybackBuffer do
@moduledoc false
# Buffer for storing messages that cannot be handled in current playback state.
# Allows to avoid race conditions when one element changes playback state
# before another does.
alias Membrane.{Buffer, Core, Event}
alias Core.Playback
alias Core.Element.{
BufferController,
CapsController,
DemandController,
EventController,
PadModel,
State
}
require PadModel
use Core.Element.Log
use Bunch
@type t :: %__MODULE__{
q: Qex.t()
}
@type message_t ::
{:membrane_demand | :membrane_buffer | :membrane_caps | :membrane_event, args :: list}
defstruct q: nil
@qe Qex
@spec new() :: t
def new do
%__MODULE__{q: @qe.new}
end
@doc """
Stores message if it cannot be handled yet.
"""
@spec store(message_t, State.t()) :: State.stateful_try_t()
def store(msg, %State{playback: %Playback{state: :playing}} = state), do: exec(msg, state)
def store({type, _args} = msg, state)
when type in [:membrane_event, :membrane_caps] do
exec(msg, state)
|> provided(
that: state.playback_buffer |> empty?,
else:
state
|> Bunch.Struct.update_in([:playback_buffer, :q], fn q -> q |> @qe.push(msg) end)
~> (state -> {:ok, state})
)
end
def store(msg, state) do
state
|> Bunch.Struct.update_in([:playback_buffer, :q], fn q -> q |> @qe.push(msg) end)
~> (state -> {:ok, state})
end
@doc """
Handles messages from buffer and passes them to proper controller, until they
can be handled in current playback state.
"""
@spec eval(State.t()) :: State.stateful_try_t()
def eval(%State{playback: %Playback{state: :playing}} = state) do
debug("evaluating playback buffer", state)
with {:ok, state} <-
state.playback_buffer.q
|> Bunch.Enum.try_reduce(state, &exec/2),
do: {:ok, state |> Bunch.Struct.put_in([:playback_buffer, :q], @qe.new)}
end
def eval(state), do: {:ok, state}
@spec empty?(t) :: boolean
defp empty?(%__MODULE__{q: q}), do: q |> Enum.empty?()
@spec exec(message_t, State.t()) :: State.stateful_try_t()
# Callback invoked on demand request coming from the source pad in the pull mode
defp exec({:membrane_demand, [size, pad_name]}, state) do
PadModel.assert_data!(pad_name, %{direction: :source}, state)
demand =
if size == 0 do
"dumb demand"
else
"demand of size #{inspect(size)}"
end
debug("Received #{demand} on pad #{inspect(pad_name)}", state)
DemandController.handle_demand(pad_name, size, state)
end
# Callback invoked on buffer coming through the sink pad
defp exec({:membrane_buffer, [buffers, pad_name]}, state) do
PadModel.assert_data!(pad_name, %{direction: :sink}, state)
debug(
["
Received buffers on pad #{inspect(pad_name)}
Buffers: ", Buffer.print(buffers)],
state
)
{messages, state} =
PadModel.get_and_update_data!(pad_name, :sticky_messages, &{&1, []}, state)
with {:ok, state} <-
messages
|> Enum.reverse()
|> Bunch.Enum.try_reduce(state, fn msg, st -> msg.(st) end) do
{:ok, state} =
cond do
PadModel.get_data!(pad_name, :sos, state) |> Kernel.not() ->
event = %{Event.sos() | payload: :auto_sos}
EventController.handle_event(pad_name, event, state)
true ->
{:ok, state}
end
BufferController.handle_buffer(pad_name, buffers, state)
end
end
# Callback invoked on incoming caps
defp exec({:membrane_caps, [caps, pad_name]}, state) do
PadModel.assert_data!(pad_name, %{direction: :sink}, state)
debug(
"""
Received caps on pad #{inspect(pad_name)}
Caps: #{inspect(caps)}
""",
state
)
CapsController.handle_caps(pad_name, caps, state)
end
# Callback invoked on incoming event
defp exec({:membrane_event, [event, pad_name]}, state) do
PadModel.assert_instance!(pad_name, state)
debug(
"""
Received event on pad #{inspect(pad_name)}
Event: #{inspect(event)}
""",
state
)
do_exec = fn state ->
EventController.handle_event(pad_name, event, state)
end
case event.stick_to do
:nothing ->
do_exec.(state)
:buffer ->
PadModel.update_data!(
pad_name,
:sticky_messages,
&[do_exec | &1],
state
)
~> (state -> {:ok, state})
end
end
end
| 26.102273 | 96 | 0.609491 |
ff0f0dd1a00511194c24b4545a60a9b0e7d5abc7 | 94 | exs | Elixir | config/config.exs | denvaar/esbuild | e2d01c87ecd60d02ab8151dcbd51a24a13495f21 | [
"MIT"
] | null | null | null | config/config.exs | denvaar/esbuild | e2d01c87ecd60d02ab8151dcbd51a24a13495f21 | [
"MIT"
] | null | null | null | config/config.exs | denvaar/esbuild | e2d01c87ecd60d02ab8151dcbd51a24a13495f21 | [
"MIT"
] | null | null | null | import Config
config :esbuild,
version: "0.13.4",
another: [
args: ["--version"]
]
| 11.75 | 23 | 0.574468 |
ff0f1bea76dc7540d76579d836edffd2c803d419 | 2,720 | exs | Elixir | test/my_app/clrt_manager_test.exs | normanpatrick/elixir-python-task-api | 84a12eff26ddf98a29526630e4ce9fa1076545cb | [
"MIT"
] | null | null | null | test/my_app/clrt_manager_test.exs | normanpatrick/elixir-python-task-api | 84a12eff26ddf98a29526630e4ce9fa1076545cb | [
"MIT"
] | null | null | null | test/my_app/clrt_manager_test.exs | normanpatrick/elixir-python-task-api | 84a12eff26ddf98a29526630e4ce9fa1076545cb | [
"MIT"
] | null | null | null | defmodule MyApp.CLRTManagerTest do
use MyApp.DataCase
alias MyApp.CLRTManager
describe "slrtasks" do
alias MyApp.CLRTManager.SLRTask
@valid_attrs %{description: "some description",
is_active: true,
name: "some name",
status: "some status"}
@update_attrs %{description: "some updated description",
is_active: false,
name: "some updated name",
status: "some updated status"}
@invalid_attrs %{description: nil,
is_active: nil,
name: nil,
status: nil}
def slr_task_fixture(attrs \\ %{}) do
{:ok, slr_task} =
attrs
|> Enum.into(@valid_attrs)
|> CLRTManager.create_slr_task()
slr_task
end
test "list_slrtasks/0 returns all slrtasks" do
slr_task = slr_task_fixture()
assert CLRTManager.list_slrtasks() == [slr_task]
end
test "get_slr_task!/1 returns the slr_task with given id" do
slr_task = slr_task_fixture()
assert CLRTManager.get_slr_task!(slr_task.id) == slr_task
end
test "create_slr_task/1 with valid data creates a slr_task" do
assert {:ok, %SLRTask{} = slr_task} = CLRTManager.create_slr_task(@valid_attrs)
assert slr_task.description == "some description"
assert slr_task.is_active == true
assert slr_task.name == "some name"
end
test "create_slr_task/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = CLRTManager.create_slr_task(@invalid_attrs)
end
test "update_slr_task/2 with valid data updates the slr_task" do
slr_task = slr_task_fixture()
assert {:ok, %SLRTask{} = slr_task} = CLRTManager.update_slr_task(slr_task, @update_attrs)
assert slr_task.description == "some updated description"
assert slr_task.is_active == false
assert slr_task.name == "some updated name"
end
test "update_slr_task/2 with invalid data returns error changeset" do
slr_task = slr_task_fixture()
assert {:error, %Ecto.Changeset{}} = CLRTManager.update_slr_task(slr_task, @invalid_attrs)
assert slr_task == CLRTManager.get_slr_task!(slr_task.id)
end
test "delete_slr_task/1 deletes the slr_task" do
slr_task = slr_task_fixture()
assert {:ok, %SLRTask{}} = CLRTManager.delete_slr_task(slr_task)
assert_raise Ecto.NoResultsError, fn -> CLRTManager.get_slr_task!(slr_task.id) end
end
test "change_slr_task/1 returns a slr_task changeset" do
slr_task = slr_task_fixture()
assert %Ecto.Changeset{} = CLRTManager.change_slr_task(slr_task)
end
end
end
| 34.871795 | 96 | 0.653676 |
ff0f27ba19f5facb1886c4a37e89de6f139992e2 | 696 | ex | Elixir | lib/ofh_web/gettext.ex | gnucifer/open-food-hub | 24632b6e8a01253c716db976d78a313746d5e38f | [
"BSD-3-Clause"
] | null | null | null | lib/ofh_web/gettext.ex | gnucifer/open-food-hub | 24632b6e8a01253c716db976d78a313746d5e38f | [
"BSD-3-Clause"
] | null | null | null | lib/ofh_web/gettext.ex | gnucifer/open-food-hub | 24632b6e8a01253c716db976d78a313746d5e38f | [
"BSD-3-Clause"
] | null | null | null | defmodule OfhWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import OfhWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :ofh
end
| 27.84 | 72 | 0.675287 |
ff0f41b741495f951ed3dde313d35d137f250750 | 4,630 | ex | Elixir | clients/container/lib/google_api/container/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/request_builder.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Container.V1.RequestBuilder do
  @moduledoc """
  Helper functions for building Tesla requests
  """

  # Matches `{var}` and `{+var}` path-template segments; the capture (with an
  # optional leading `+`) selects the replacement strategy below.
  @path_template_regex ~r/{(\+?[^}]+)}/i

  @doc """
  Specify the request method when building a request

  ## Parameters

  - request (Map) - Collected request options
  - m (String) - Request method

  ## Returns

  Map
  """
  @spec method(map(), String.t()) :: map()
  def method(request, m) do
    Map.put_new(request, :method, m)
  end

  @doc """
  Specify the request URL when building a request, expanding any
  `{var}`/`{+var}` template segments from `replacements`.

  ## Parameters

  - request (Map) - Collected request options
  - u (String) - Request URL (possibly containing template variables)
  - replacements (Map) - Template variable name => value

  ## Returns

  Map
  """
  @spec url(map(), String.t(), map()) :: map()
  def url(request, u, replacements) do
    url(request, replace_path_template_vars(u, replacements))
  end

  @spec url(map(), String.t()) :: map()
  def url(request, u) do
    Map.put_new(request, :url, u)
  end

  # Expands every `{...}` template segment in `u` using `replacements`.
  @spec replace_path_template_vars(String.t(), map()) :: String.t()
  def replace_path_template_vars(u, replacements) do
    Regex.replace(@path_template_regex, u, fn _, var -> replacement_value(var, replacements) end)
  end

  # `{+name}` ("reserved expansion") keeps reserved URL characters, so the
  # looked-up value is URI-decoded before insertion.
  defp replacement_value("+" <> name, replacements) do
    URI.decode(replacement_value(name, replacements))
  end

  # Plain `{name}` expansion; missing variables expand to the empty string.
  defp replacement_value(name, replacements) do
    Map.get(replacements, name, "")
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  - request (Map) - Collected request options
  - definitions (Map) - Map of parameter name to parameter location.
  - options (KeywordList) - The provided optional parameters

  ## Returns

  Map
  """
  # NOTE: specs previously used the literal `:atom` (the atom :atom) where the
  # type `atom()` was intended; fixed here and in add_param/4 below.
  @spec add_optional_params(map(), %{optional(atom()) => atom()}, keyword()) :: map()
  def add_optional_params(request, _, []), do: request

  def add_optional_params(request, definitions, [{key, value} | tail]) do
    case definitions do
      %{^key => location} ->
        request
        |> add_param(location, key, value)
        |> add_optional_params(definitions, tail)

      _ ->
        # Unknown options are silently dropped.
        add_optional_params(request, definitions, tail)
    end
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  - request (Map) - Collected request options
  - location (atom) - Where to put the parameter
  - key (atom) - The name of the parameter
  - value (any) - The value of the parameter

  ## Returns

  Map
  """
  @spec add_param(map(), atom(), atom(), any()) :: map()
  # A bare :body value replaces the request body wholesale.
  def add_param(request, :body, :body, value), do: Map.put(request, :body, value)

  # A named :body key becomes a JSON-encoded multipart field.
  def add_param(request, :body, key, value) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(:body, &(Tesla.Multipart.add_field(&1, key, Poison.encode!(value), headers: [{:"Content-Type", "application/json"}])))
  end

  # File uploads are attached to the multipart body by path.
  def add_param(request, :file, name, path) do
    request
    |> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
    |> Map.update!(:body, &(Tesla.Multipart.add_file(&1, path, name: name)))
  end

  # Form fields accumulate into a plain map body.
  def add_param(request, :form, name, value) do
    request
    |> Map.update(:body, %{name => value}, &(Map.put(&1, name, value)))
  end

  # Any other location (e.g. :query, :headers) accumulates {key, value} pairs.
  def add_param(request, location, key, value) do
    Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
  end

  @doc """
  Handle the response for a Tesla request

  ## Parameters

  - env (Tesla.Env) - The response object
  - struct - The shape of the struct to deserialize into

  ## Returns

  {:ok, struct} on success
  {:error, info} on failure
  """
  @spec decode(Tesla.Env.t()) :: {:ok, struct()} | {:error, any()}
  def decode(%Tesla.Env{status: 200, body: body}), do: Poison.decode(body)

  def decode(response) do
    {:error, response}
  end

  @spec decode(Tesla.Env.t(), false | struct()) :: {:ok, struct() | Tesla.Env.t()} | {:error, any()}
  # `false` means "no deserialization": return the raw env on success.
  def decode(%Tesla.Env{status: 200} = env, false), do: {:ok, env}

  def decode(%Tesla.Env{status: 200, body: body}, struct) do
    Poison.decode(body, as: struct)
  end

  def decode(response, _struct) do
    {:error, response}
  end
end
| 28.757764 | 137 | 0.669546 |
ff0f6e19349dc145bbf2fc5ae4e07b4f1e041a37 | 1,005 | ex | Elixir | lib/typetalk/client_credential.ex | takuji/typetalk-elixir | 202ec4968733f630d46ec15e5879caa1bb65d0ae | [
"MIT"
] | 3 | 2017-04-14T00:59:55.000Z | 2017-04-14T01:06:33.000Z | lib/typetalk/client_credential.ex | takuji/typetalk-elixir | 202ec4968733f630d46ec15e5879caa1bb65d0ae | [
"MIT"
] | null | null | null | lib/typetalk/client_credential.ex | takuji/typetalk-elixir | 202ec4968733f630d46ec15e5879caa1bb65d0ae | [
"MIT"
] | null | null | null | defmodule Typetalk.ClientCredential do
@moduledoc """
Function to get an access token using a client credential.
You use this module when you are the only user of Typetalk API.
"""
@default_scope "my,topic.read,topi.post"
@doc """
Returns an access token and related information.
## Example
{:ok, auth} = Typetalk.ClientCredential.access_token("your-client-id",
"your-client-secret",
"my,topic.read,topic.post")
## API Doc
[API Doc](https://developer.nulab-inc.com/docs/typetalk/auth#client)
"""
@spec access_token(String.t, String.t, String.t) :: {:ok, Typetalk.AccessToken.t}|{:error, HTTPoison.Response.t}
def access_token(client_id, client_secret, scope \\ @default_scope) do
{:form, [grant_type: "client_credentials", client_id: client_id, client_secret: client_secret, scope: scope]}
|> Typetalk.AccessToken.get_access_token
end
end
| 38.653846 | 114 | 0.636816 |
ff0f889f36cd8c454e058d5302850d03a60d1137 | 376 | ex | Elixir | elixir/lib/homework/shiren/prices.ex | ceejaay/web-homework | e5844609b62bdfa79a9b5b8f302c0d7ba81dc75d | [
"MIT"
] | null | null | null | elixir/lib/homework/shiren/prices.ex | ceejaay/web-homework | e5844609b62bdfa79a9b5b8f302c0d7ba81dc75d | [
"MIT"
] | null | null | null | elixir/lib/homework/shiren/prices.ex | ceejaay/web-homework | e5844609b62bdfa79a9b5b8f302c0d7ba81dc75d | [
"MIT"
] | null | null | null | defmodule Homework.Shiren.Prices do
use Ecto.Schema
import Ecto.Changeset
schema "price" do
field :amount, :integer
field :category, :string
field :type, :string
timestamps()
end
@doc false
def changeset(prices, attrs) do
prices
|> cast(attrs, [:category, :amount, :type])
|> validate_required([:category, :amount, :type])
end
end
| 18.8 | 53 | 0.659574 |
ff0f8994f475bdfe50fb17c159a556328971aa14 | 203 | exs | Elixir | test/platform/video/video_helper_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 17 | 2017-11-14T14:03:18.000Z | 2021-12-10T04:18:48.000Z | test/platform/video/video_helper_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 21 | 2017-11-19T13:38:07.000Z | 2022-02-10T00:11:14.000Z | test/platform/video/video_helper_test.exs | lucab85/audioslides.io | cb502ccf6ed0b2db42d9fb20bb4c963bcca3cfa9 | [
"MIT"
] | 2 | 2019-09-03T03:32:13.000Z | 2021-02-23T21:52:57.000Z | defmodule Platform.VideoHelperTest do
use PlatformWeb.ConnCase
import Platform.VideoHelper
alias Platform.Core.Schema.Slide
alias Platform.Core.Schema.Lesson
doctest Platform.VideoHelper
end
| 20.3 | 37 | 0.82266 |
ff0f8cae374f1b1701407746bdf06b424e6f71bd | 2,213 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1beta3/model/suggestion_cluster_proto.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/suggestion_cluster_proto.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/suggestion_cluster_proto.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.SuggestionClusterProto do
  @moduledoc """
  A set of similar suggestions that we suspect are closely related.
  This proto and most of the nested protos are branched from
  foxandcrown.prelaunchreport.service.SuggestionClusterProto, replacing PLR's
  dependencies with FTL's.
  ## Attributes
  *   `category` (*type:* `String.t`, *default:* `nil`) - Category in which these types of suggestions should appear.
      Always set.
  *   `suggestions` (*type:* `list(GoogleApi.ToolResults.V1beta3.Model.SuggestionProto.t)`, *default:* `nil`) - A sequence of suggestions. All of the suggestions within a cluster must
      have the same SuggestionPriority and belong to the same SuggestionCategory.
      Suggestions with the same screenshot URL should be adjacent.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :category => String.t(),
          :suggestions => list(GoogleApi.ToolResults.V1beta3.Model.SuggestionProto.t())
        }
  field(:category)
  # :suggestions is decoded as a list of SuggestionProto structs.
  field(:suggestions, as: GoogleApi.ToolResults.V1beta3.Model.SuggestionProto, type: :list)
end
# Generated Poison protocol implementations delegating to the Gax model base.
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.SuggestionClusterProto do
  def decode(value, options) do
    GoogleApi.ToolResults.V1beta3.Model.SuggestionClusterProto.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.SuggestionClusterProto do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.824561 | 183 | 0.753728 |
ff0f96a1d4bd42d69fdfc9b374605c25b3783b6d | 780 | ex | Elixir | Microsoft.Azure.Management.Database.CosmosDb/lib/microsoft/azure/management/database/cosmos_db/model/metric_value.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | 4 | 2018-09-29T03:43:15.000Z | 2021-04-01T18:30:46.000Z | Microsoft.Azure.Management.Database.CosmosDb/lib/microsoft/azure/management/database/cosmos_db/model/metric_value.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | Microsoft.Azure.Management.Database.CosmosDb/lib/microsoft/azure/management/database/cosmos_db/model/metric_value.ex | chgeuer/ex_microsoft_azure_management | 99cd9f7f2ff1fdbe69ca5bac55b6e2af91ba3603 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule Microsoft.Azure.Management.Database.CosmosDb.Model.MetricValue do
  @moduledoc """
  Represents metrics values.
  """
  @derive [Poison.Encoder]
  # Generated model; field semantics (counts/averages over a time window)
  # follow the Azure Monitor MetricValue contract — see the service docs.
  defstruct [
    :"_count",
    :"average",
    :"maximum",
    :"minimum",
    :"timestamp",
    :"total"
  ]
  @type t :: %__MODULE__{
    :"_count" => float(),
    :"average" => float(),
    :"maximum" => float(),
    :"minimum" => float(),
    :"timestamp" => DateTime.t,
    :"total" => float()
  }
end
# Decoding is a pass-through: fields are already in the right shape.
defimpl Poison.Decoder, for: Microsoft.Azure.Management.Database.CosmosDb.Model.MetricValue do
  def decode(value, _options) do
    value
  end
end
| 21.666667 | 94 | 0.644872 |
ff0fa566de5129fdb79b77e6696638e90d3ace47 | 2,001 | ex | Elixir | exercises/mutex/mut.ex | Murre3/ID1019 | 8240d07be35843610c6c14a40bcb3ed21b3ea36f | [
"MIT"
] | null | null | null | exercises/mutex/mut.ex | Murre3/ID1019 | 8240d07be35843610c6c14a40bcb3ed21b3ea36f | [
"MIT"
] | null | null | null | exercises/mutex/mut.ex | Murre3/ID1019 | 8240d07be35843610c6c14a40bcb3ed21b3ea36f | [
"MIT"
] | null | null | null | defmodule Mutex do
# Peterson's algorithm
@type state() :: boolean() | id()
@type id() :: 1..2
def test do
c1 = spawn_link(fn -> cell(false) end)
c2 = spawn_link(fn -> cell(false) end)
q = spawn_link(fn -> cell(0) end)
p1 = spawn(fn -> lock(0, c1, c2, q, self()) end)
p2 = spawn(fn -> lock(1, c2, c1, q, self()) end)
# Gets stuck on p2's lock
wait(p1, p2, q)
end
def wait(p1, p2, q) do
receive do
{0, :done} ->
unlock(0, p1, p2, q, self())
wait(p1, p2, q)
{1, :done} ->
unlock(1, p2, p1, q, self())
wait(p1,p2, q)
_ -> wait(p1, p2, q)
end
end
def lock(id, m, p, q, from) do
IO.puts("Process #{id} set itself to true")
Mutex.set(m, true)
other = rem(id+1, 2)
IO.puts("Process #{id} set the control process to prioritize the other")
Mutex.set(q, other)
IO.puts("Checking other process in process: #{id}")
case get(p) do
:false ->
send(from, {id, :done})
:locked
:true ->
IO.puts("Checking the control process in process: #{id}")
case Mutex.get(q) do
^id ->
send(from, {id, :done})
:locked
^other ->
lock(id, m, p, q, from)
end
end
end
def unlock(_id, m, _p, _q, _from) do
Mutex.set(m, false)
end
defp cell(state) do
receive do
{:get, from} ->
send(from, {:ok, state})
cell(state)
{:set, value, from} ->
send(from, :ok)
cell(value)
end
end
def get(cell) do
send(cell, {:get, self()})
receive do
{:ok, value} -> value
end
end
def set(cell, val) do
send(cell, {:set, val, self()})
receive do
:ok -> :ok
end
end
def do_it(thing, lock) do
case Mutex.get(lock) do
:taken ->
IO.puts("Lock was taken, trying again")
do_it(thing, lock)
:open ->
thing.(5)
Mutex.set(lock, :open)
end
end
end
| 20.628866 | 76 | 0.502749 |
ff0fa7d435174e1c5eeaaca7a27e1ccce9046f9c | 2,089 | ex | Elixir | lib/ueberauth/strategy/strava/oauth.ex | syamilmj/ueberauth_strava | 6e3106c8ccf4a79db6dc7cfe88c017c4bda401ec | [
"MIT"
] | 8 | 2016-06-01T01:21:24.000Z | 2021-06-04T11:37:57.000Z | lib/ueberauth/strategy/strava/oauth.ex | syamilmj/ueberauth_strava | 6e3106c8ccf4a79db6dc7cfe88c017c4bda401ec | [
"MIT"
] | 6 | 2017-01-13T06:25:15.000Z | 2020-07-02T22:24:30.000Z | lib/ueberauth/strategy/strava/oauth.ex | syamilmj/ueberauth_strava | 6e3106c8ccf4a79db6dc7cfe88c017c4bda401ec | [
"MIT"
] | 9 | 2017-01-13T06:07:05.000Z | 2021-07-19T17:27:33.000Z | defmodule Ueberauth.Strategy.Strava.OAuth do
@moduledoc """
OAuth2 for Strava.
Add `client_id` and `client_secret` to your configuration:
config :ueberauth, Ueberauth.Strategy.Strava.OAuth,
client_id: System.get_env("Strava_APP_ID"),
client_secret: System.get_env("Strava_APP_SECRET")
"""
use OAuth2.Strategy
@defaults [
strategy: __MODULE__,
site: "https://www.strava.com/",
authorize_url: "https://www.strava.com/oauth/authorize",
token_url: "https://www.strava.com/oauth/token"
]
@doc """
Construct a client for requests to Strava.
This will be setup automatically for you in `Ueberauth.Strategy.Strava`.
These options are only useful for usage outside the normal callback phase
of Ueberauth.
"""
def client(opts \\ []) do
config = Application.get_env(:ueberauth, Ueberauth.Strategy.Strava.OAuth)
opts =
@defaults
|> Keyword.merge(config)
|> Keyword.merge(opts)
OAuth2.Client.new(opts)
end
@doc """
Provides the authorize url for the request phase of Ueberauth.
No need to call this usually.
"""
def authorize_url!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.authorize_url!(params)
end
def get(token, url, headers \\ [], opts \\ []) do
client(token: token)
|> put_param("client_secret", client().client_secret)
|> OAuth2.Client.get(url, headers, opts)
end
def get_token!(params \\ [], opts \\ []) do
opts
|> client
|> OAuth2.Client.get_token!(token_params(params))
end
def token_params(params \\ []) do
Application.get_env(:ueberauth, Ueberauth.Strategy.Strava.OAuth)
|> Keyword.take([:client_id, :client_secret])
|> Keyword.merge(params)
end
# Strategy Callbacks
def authorize_url(client, params) do
OAuth2.Strategy.AuthCode.authorize_url(client, params)
end
def get_token(client, params, headers) do
client
|> put_param(:client_secret, client.client_secret)
|> put_header("Accept", "application/json")
|> OAuth2.Strategy.AuthCode.get_token(params, headers)
end
end
| 26.443038 | 77 | 0.68023 |
ff0fc8a1f86a42d070d8f2050bc0be943d319d89 | 2,099 | ex | Elixir | clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/source_repo/lib/google_api/source_repo/v1/model/mirror_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SourceRepo.V1.Model.MirrorConfig do
@moduledoc """
Configuration to automatically mirror a repository from another hosting service, for example GitHub or Bitbucket.
## Attributes
* `deployKeyId` (*type:* `String.t`, *default:* `nil`) - ID of the SSH deploy key at the other hosting service. Removing this key from the other service would deauthorize Google Cloud Source Repositories from mirroring.
* `url` (*type:* `String.t`, *default:* `nil`) - URL of the main repository at the other hosting service.
* `webhookId` (*type:* `String.t`, *default:* `nil`) - ID of the webhook listening to updates to trigger mirroring. Removing this webhook from the other hosting service will stop Google Cloud Source Repositories from receiving notifications, and thereby disabling mirroring.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:deployKeyId => String.t(),
:url => String.t(),
:webhookId => String.t()
}
field(:deployKeyId)
field(:url)
field(:webhookId)
end
defimpl Poison.Decoder, for: GoogleApi.SourceRepo.V1.Model.MirrorConfig do
def decode(value, options) do
GoogleApi.SourceRepo.V1.Model.MirrorConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SourceRepo.V1.Model.MirrorConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.603774 | 278 | 0.734636 |
ff0ffceff4ca56773ddd0e505f2f830b549c39f6 | 284 | exs | Elixir | priv/repo/migrations/20180729143035_add_sort_title_to_parent_sources_and_sources.exs | allen-garvey/block-quote-phoenix | 5c0f5d16daf6bb515a8f1846c3e4311b368a7bdb | [
"MIT"
] | 4 | 2019-10-04T16:11:15.000Z | 2021-08-18T21:00:13.000Z | apps/blockquote/priv/repo/migrations/20180729143035_add_sort_title_to_parent_sources_and_sources.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | 5 | 2020-03-16T23:52:25.000Z | 2021-09-03T16:52:17.000Z | apps/blockquote/priv/repo/migrations/20180729143035_add_sort_title_to_parent_sources_and_sources.exs | allen-garvey/phoenix-umbrella | 1d444bbd62a5e7b5f51d317ce2be71ee994125d5 | [
"MIT"
] | null | null | null | defmodule Blockquote.Repo.Migrations.AddSortTitleToParentSources do
use Ecto.Migration
def change do
alter table(:parent_sources) do
add :sort_title, :string, null: false
end
alter table(:sources) do
add :sort_title, :string, null: false
end
end
end
| 21.846154 | 67 | 0.711268 |
ff1030915d41a6e5e44cf825e209922f4e6dec1f | 1,373 | exs | Elixir | apps/site/test/site_web/plugs/remote_ip_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/test/site_web/plugs/remote_ip_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/test/site_web/plugs/remote_ip_test.exs | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule SiteWeb.Plugs.RemoteIpTest do
use SiteWeb.ConnCase, async: true
setup %{conn: conn} do
%{conn: %{conn | remote_ip: {127, 0, 0, 1}}}
end
describe "call/2" do
test "sets remote_ip when an IP address is in X-Forwarded-For header", %{conn: conn} do
conn =
conn
|> Plug.Conn.put_req_header("x-forwarded-for", "18.0.0.1")
|> SiteWeb.Plugs.RemoteIp.call(init())
assert conn.remote_ip == {18, 0, 0, 1}
end
test "uses last IP address when multiple", %{conn: conn} do
conn =
conn
|> Plug.Conn.put_req_header("x-forwarded-for", "18.0.0.1, 18.0.0.2")
|> SiteWeb.Plugs.RemoteIp.call(init())
assert conn.remote_ip == {18, 0, 0, 2}
end
test "handles when header is missing", %{conn: conn} do
assert SiteWeb.Plugs.RemoteIp.call(conn, init()).remote_ip == {127, 0, 0, 1}
end
test "handles if IP address is malformed", %{conn: conn} do
conn =
conn
|> Plug.Conn.put_req_header("x-forwarded-for", "malformed")
|> SiteWeb.Plugs.RemoteIp.call(init())
assert conn.remote_ip == {127, 0, 0, 1}
end
end
describe "format/1" do
test "prints IP address correctly" do
assert SiteWeb.Plugs.RemoteIp.format({127, 0, 0, 1}) == "127.0.0.1"
end
end
defp init do
SiteWeb.Plugs.RemoteIp.init([])
end
end
| 26.921569 | 91 | 0.600146 |
ff104fee27da001df33af07b75a5d41d51a71fea | 2,390 | ex | Elixir | lib/tentacat/repositories/branches.ex | wesleimp/tentacat | 6e4de63dd19ae719f2b59d4f289979a91e3cb5e2 | [
"MIT"
] | null | null | null | lib/tentacat/repositories/branches.ex | wesleimp/tentacat | 6e4de63dd19ae719f2b59d4f289979a91e3cb5e2 | [
"MIT"
] | null | null | null | lib/tentacat/repositories/branches.ex | wesleimp/tentacat | 6e4de63dd19ae719f2b59d4f289979a91e3cb5e2 | [
"MIT"
] | null | null | null | defmodule Tentacat.Repositories.Branches do
import Tentacat
alias Tentacat.Client
@doc """
List Branches
## Example
Tentacat.Repositories.Branches.list "elixir-lang", "elixir"
Tentacat.Repositories.Branches.list client, "elixir-lang", "elixir"
More info at: https://developer.github.com/v3/repos/#list-branches
"""
@spec list(Client.t(), binary, binary) :: Tentacat.response()
def list(client \\ %Client{}, owner, repo) do
get("repos/#{owner}/#{repo}/branches", client)
end
@doc """
Get Branch
## Example
Tentacat.Repositories.Branches.find "elixir-lang", "elixir", "feature"
Tentacat.Repositories.Branches.find client, "elixir-lang", "elixir", "feature"
More info at: https://developer.github.com/v3/repos/#get-branch
"""
@spec find(Client.t(), binary, binary, binary) :: Tentacat.response()
def find(client \\ %Client{}, owner, repo, branch) do
get("repos/#{owner}/#{repo}/branches/#{branch}", client)
end
@doc """
Update Branch Protection
Create body example:
%{
"required_status_checks" => %{
"strict" => true,
"contexts" => [
"continuous-integration/travis-ci"
]
},
"enforce_admins" => true,
"required_pull_request_reviews" => %{
"dismissal_restrictions" => %{
"users" => [
"octocat"
],
"teams" => [
"justice-league"
]
},
"dismiss_stale_reviews" => true,
"require_code_owner_reviews" => true,
"required_approving_review_count" => 2
},
"restrictions" => %{
"users" => [
"octocat"
],
"teams" => [
"justice-league"
]
}
}
## Example
Tentacat.Repositories.Branches.update_protection "elixir-lang", "elixir", "feature", body
Tentacat.Repositories.Branches.update_protection client, "elixir-lang", "elixir", "feature", body
More info at: https://developer.github.com/v3/repos/branches/#update-branch-protection
"""
@spec update_protection(Client.t(), binary, binary, binary, map) :: Tentacat.response()
def update_protection(client \\ %Client{}, owner, repo, branch, body) when is_map(body) do
put("repos/#{owner}/#{repo}/branches/#{branch}/protection", client, body)
end
end
| 28.795181 | 103 | 0.593724 |
ff10aee6810d5fe97ee9ead34f6bbe5e2f947ba1 | 884 | ex | Elixir | apps/legion/lib/identity/auth/insecure/blacklist/entry.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 1 | 2021-01-04T11:06:12.000Z | 2021-01-04T11:06:12.000Z | apps/legion/lib/identity/auth/insecure/blacklist/entry.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | 3 | 2021-01-30T06:40:37.000Z | 2021-01-30T06:41:08.000Z | apps/legion/lib/identity/auth/insecure/blacklist/entry.ex | i386-64/legion | 41ae99af9be962d7fb38726ddf4bb0456edb5ca4 | [
"Apache-2.0"
] | null | null | null | defmodule Legion.Identity.Auth.Insecure.Blacklist.Entry do
@moduledoc """
Represents a password blacklisting entry.
## Schema fields
- `:authority_id`: The identifier of the authority added the entry. *Nullable*. Entries containing this attribute are known to be generated at runtime.
- `:hash`: The hash ought to be blacklisted.
- `:inserted_at`: The time of the blacklisting.
"""
use Legion.Stereotype, :model
alias Legion.Identity.Information.Registration, as: User
schema "password_blacklist_entries" do
belongs_to :authority, User
field :hash, :binary
field :inserted_at, :naive_datetime_usec, read_after_writes: true
end
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:authority_id, :hash])
|> validate_required([:hash])
|> foreign_key_constraint(:authority_id)
|> unique_constraint(:hash)
end
end
| 30.482759 | 153 | 0.720588 |
ff10da3726b26a71014885887d9707aab26dfdca | 1,535 | ex | Elixir | clients/domains/lib/google_api/domains/v1/model/test_iam_permissions_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/domains/lib/google_api/domains/v1/model/test_iam_permissions_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/domains/lib/google_api/domains/v1/model/test_iam_permissions_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Domains.V1.Model.TestIamPermissionsResponse do
@moduledoc """
Response message for `TestIamPermissions` method.
## Attributes
* `permissions` (*type:* `list(String.t)`, *default:* `nil`) - A subset of `TestPermissionsRequest.permissions` that the caller is allowed.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissions => list(String.t()) | nil
}
field(:permissions, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.Domains.V1.Model.TestIamPermissionsResponse do
def decode(value, options) do
GoogleApi.Domains.V1.Model.TestIamPermissionsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Domains.V1.Model.TestIamPermissionsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.659574 | 143 | 0.745928 |
ff10dcd446da1211978032d88c02e04f6a6d369e | 276 | ex | Elixir | lib/nsq/json.ex | blueshift-labs/elixir_nsq | 35711fc5f4091f7f1983ffde7ba4f73259c75848 | [
"MIT"
] | null | null | null | lib/nsq/json.ex | blueshift-labs/elixir_nsq | 35711fc5f4091f7f1983ffde7ba4f73259c75848 | [
"MIT"
] | null | null | null | lib/nsq/json.ex | blueshift-labs/elixir_nsq | 35711fc5f4091f7f1983ffde7ba4f73259c75848 | [
"MIT"
] | null | null | null | defmodule NSQ.Json do
@moduledoc "Behaviour for json encoding and decoding"
@callback encode!(any(), list()) :: String.t() | no_return
@callback decode(iodata, list()) :: {:ok, String.t(), {:error, any()}}
@callback decode!(iodata(), list()) :: any() | no_return
end
| 34.5 | 72 | 0.648551 |
ff111b5ac150a9735e2c165a20746094a6147a27 | 506 | ex | Elixir | lib/app_api_web/v1/serializers/json/conversation_message_serializer.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | 2 | 2018-07-05T15:13:12.000Z | 2021-12-16T08:58:56.000Z | lib/app_api_web/v1/serializers/json/conversation_message_serializer.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | null | null | null | lib/app_api_web/v1/serializers/json/conversation_message_serializer.ex | turbo-play/phoenix-services | 9d3318c6b5d5ce6dead441758fc86ec34892b0f6 | [
"Apache-2.0"
] | 2 | 2018-08-13T21:37:00.000Z | 2021-11-10T04:08:26.000Z | defmodule AppApiWeb.V1.JSON.ConversationMessageSerializer do
alias AppApi.ConversationMessage, as: CM
alias AppApiWeb.V1.JSON.UserSerializer
alias AppApi.Convert
@moduledoc """
Serializes data into V1 JSON response format.
"""
use AppApiWeb.V1
def serialize(%CM{} = c) when not is_nil(c) do
%{
id: c.id,
creationDate: Convert.to_timestamp(c.inserted_at),
userId: c.user_id,
type: c.type,
content: c.content
}
end
def serialize() do %{} end
end
| 21.083333 | 60 | 0.675889 |
ff11274537f2daf2c36c6823782a631acaf6c71d | 511 | ex | Elixir | web/lib/infolab_light_games_web/views/error_view.ex | simmsb/infolab-lights | 1cd685029c50cde0de84176efd65fb0cabc6c820 | [
"MIT"
] | 5 | 2021-11-04T08:31:46.000Z | 2021-11-07T12:15:57.000Z | web/lib/infolab_light_games_web/views/error_view.ex | simmsb/infolab-lights | 1cd685029c50cde0de84176efd65fb0cabc6c820 | [
"MIT"
] | 47 | 2020-08-03T09:30:46.000Z | 2021-03-01T08:18:51.000Z | web/lib/infolab_light_games_web/views/error_view.ex | nitros12/something-secret | 1cd685029c50cde0de84176efd65fb0cabc6c820 | [
"MIT"
] | 2 | 2021-11-04T10:46:41.000Z | 2022-02-22T18:05:44.000Z | defmodule InfolabLightGamesWeb.ErrorView do
use InfolabLightGamesWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 30.058824 | 61 | 0.745597 |
ff11383357b2bd3c3b74f32d6572fe9dc7d77731 | 1,063 | ex | Elixir | apps/nightcrawler_web/test/support/conn_case.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | apps/nightcrawler_web/test/support/conn_case.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | apps/nightcrawler_web/test/support/conn_case.ex | ZucchiniZe/nightcrawler_elixir | be82b0e7c25f55c7d2c2d3aa2860371cb68aeeb6 | [
"MIT"
] | null | null | null | defmodule NightcrawlerWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
import NightcrawlerWeb.Router.Helpers
# The default endpoint for testing
@endpoint NightcrawlerWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Nightcrawler.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Nightcrawler.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 27.25641 | 74 | 0.726246 |
ff118151a3b6d66365f9a3d8dc2802059894ee1f | 348 | exs | Elixir | installer/templates/phx_umbrella/apps/app_name/config/config.exs | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | 1 | 2018-07-26T10:42:26.000Z | 2018-07-26T10:42:26.000Z | installer/templates/phx_umbrella/apps/app_name/config/config.exs | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | null | null | null | installer/templates/phx_umbrella/apps/app_name/config/config.exs | mirego/phoenix | 5871888b77b9d34f9a4a33a3644b87f91c8b30ed | [
"MIT"
] | null | null | null | # Since configuration is shared in umbrella projects, this file
# should only configure the :<%= app_name %> application itself
# and only for organization purposes. All other config goes to
# the umbrella root.
use Mix.Config
<%= if ecto do %>config :<%= app_name %>, ecto_repos: [<%= app_module %>.Repo]<% end %>
import_config "#{Mix.env}.exs"
| 34.8 | 87 | 0.70977 |
ff11b13b90d8ff9c18323fd7132d54b646b5d8df | 9,910 | exs | Elixir | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | koudelka/elixir | e0c60c68a00d0ed9cdbfcb8c9c16f9ef9cd09222 | [
"Apache-2.0"
] | 2 | 2021-11-10T01:18:41.000Z | 2021-11-10T01:35:54.000Z | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | koudelka/elixir | e0c60c68a00d0ed9cdbfcb8c9c16f9ef9cd09222 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/code_normalizer/formatted_ast_test.exs | koudelka/elixir | e0c60c68a00d0ed9cdbfcb8c9c16f9ef9cd09222 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Code.Normalizer.FormatterASTTest do
use ExUnit.Case, async: true
defmacro assert_same(good, opts \\ []) do
quote bind_quoted: [good: good, opts: opts], location: :keep do
assert IO.iodata_to_binary(Code.format_string!(good, opts)) ==
string_to_string(good, opts)
end
end
def string_to_string(good, opts) do
line_length = Keyword.get(opts, :line_length, 98)
good = String.trim(good)
to_quoted_opts =
Keyword.merge(
[
literal_encoder: &{:ok, {:__block__, &2, [&1]}},
token_metadata: true,
unescape: false
],
opts
)
{quoted, comments} = Code.string_to_quoted_with_comments!(good, to_quoted_opts)
to_algebra_opts = [comments: comments, escape: false] ++ opts
quoted
|> Code.quoted_to_algebra(to_algebra_opts)
|> Inspect.Algebra.format(line_length)
|> IO.iodata_to_binary()
end
describe "integers" do
test "in decimal base" do
assert_same "0"
assert_same "100"
assert_same "007"
assert_same "10000"
assert_same "100_00"
end
test "in binary base" do
assert_same "0b0"
assert_same "0b1"
assert_same "0b101"
assert_same "0b01"
assert_same "0b111_111"
end
test "in octal base" do
assert_same "0o77"
assert_same "0o0"
assert_same "0o01"
assert_same "0o777_777"
end
test "in hex base" do
assert_same "0x1"
assert_same "0x01"
end
test "as chars" do
assert_same "?a"
assert_same "?1"
assert_same "?è"
assert_same "??"
assert_same "?\\\\"
assert_same "?\\s"
assert_same "?🎾"
end
end
describe "floats" do
test "with normal notation" do
assert_same "0.0"
assert_same "1.0"
assert_same "123.456"
assert_same "0.0000001"
assert_same "001.100"
assert_same "0_10000_0.000_000"
end
test "with scientific notation" do
assert_same "1.0e1"
assert_same "1.0e-1"
assert_same "1.0e01"
assert_same "1.0e-01"
assert_same "001.100e-010"
assert_same "0_100_0000.100e-010"
end
end
describe "atoms" do
test "true, false, nil" do
assert_same "nil"
assert_same "true"
assert_same "false"
end
test "without escapes" do
assert_same ~S[:foo]
end
test "with escapes" do
assert_same ~S[:"f\a\b\ro"]
end
test "with unicode" do
assert_same ~S[:ólá]
end
test "does not reformat aliases" do
assert_same ~S[:"Elixir.String"]
end
test "quoted operators" do
assert_same ~S[:"::"]
assert_same ~S[:"..//"]
assert_same ~S{["..//": 1]}
end
test "with interpolation" do
assert_same ~S[:"one #{2} three"]
end
test "with escapes and interpolation" do
assert_same ~S[:"one\n\"#{2}\"\nthree"]
end
end
describe "strings" do
test "without escapes" do
assert_same ~S["foo"]
end
test "with escapes" do
assert_same ~S["\x0A"]
assert_same ~S["f\a\b\ro"]
assert_same ~S["double \" quote"]
end
test "keeps literal new lines" do
assert_same """
"fo
o"
"""
end
test "with interpolation" do
assert_same ~S["one #{} three"]
assert_same ~S["one #{2} three"]
end
test "with escaped interpolation" do
assert_same ~S["one\#{two}three"]
end
test "with escapes and interpolation" do
assert_same ~S["one\n\"#{2}\"\nthree"]
end
end
describe "charlists" do
test "without escapes" do
assert_same ~S['']
assert_same ~S[' ']
assert_same ~S['foo']
end
test "with escapes" do
assert_same ~S['f\a\b\ro']
assert_same ~S['single \' quote']
end
test "keeps literal new lines" do
assert_same """
'fo
o'
"""
end
test "with interpolation" do
assert_same ~S['one #{2} three']
end
test "with escape and interpolation" do
assert_same ~S['one\n\'#{2}\'\nthree']
end
end
describe "string heredocs" do
test "without escapes" do
assert_same ~S'''
"""
hello
"""
'''
end
test "with escapes" do
assert_same ~S'''
"""
f\a\b\ro
"""
'''
assert_same ~S'''
"""
multiple "\"" quotes
"""
'''
end
test "with interpolation" do
assert_same ~S'''
"""
one
#{2}
three
"""
'''
assert_same ~S'''
"""
one
"
#{2}
"
three
"""
'''
end
test "nested with empty lines" do
assert_same ~S'''
nested do
"""
foo
bar
"""
end
'''
end
test "nested with empty lines and interpolation" do
assert_same ~S'''
nested do
"""
#{foo}
#{bar}
"""
end
'''
assert_same ~S'''
nested do
"""
#{foo}
#{bar}
"""
end
'''
end
test "with escaped new lines" do
assert_same ~S'''
"""
one\
#{"two"}\
three\
"""
'''
end
end
describe "charlist heredocs" do
test "without escapes" do
assert_same ~S"""
'''
hello
'''
"""
end
test "with escapes" do
assert_same ~S"""
'''
f\a\b\ro
'''
"""
assert_same ~S"""
'''
multiple "\"" quotes
'''
"""
end
test "with interpolation" do
assert_same ~S"""
'''
one
#{2}
three
'''
"""
assert_same ~S"""
'''
one
"
#{2}
"
three
'''
"""
end
end
describe "keyword list" do
test "blocks" do
assert_same ~S"""
defmodule Example do
def sample, do: :ok
end
"""
end
test "omitting brackets" do
assert_same ~S"""
@type foo :: a when b: :c
"""
end
test "last tuple element as keyword list keeps its format" do
assert_same ~S"{:wrapped, [opt1: true, opt2: false]}"
assert_same ~S"{:unwrapped, opt1: true, opt2: false}"
assert_same ~S"{:wrapped, 1, [opt1: true, opt2: false]}"
assert_same ~S"{:unwrapped, 1, opt1: true, opt2: false}"
end
end
describe "preserves user choice on parenthesis" do
test "in functions with do blocks" do
assert_same(~S"""
foo Bar do
:ok
end
""")
assert_same(~S"""
foo(Bar) do
:ok
end
""")
end
end
describe "preserves formatting for sigils" do
test "without interpolation" do
assert_same ~S[~s(foo)]
assert_same ~S[~s{foo bar}]
assert_same ~S[~r/Bar Baz/]
assert_same ~S[~w<>]
assert_same ~S[~W()]
end
test "with escapes" do
assert_same ~S[~s(foo \) bar)]
assert_same ~S[~s(f\a\b\ro)]
assert_same ~S"""
~S(foo\
bar)
"""
end
test "with nested new lines" do
assert_same ~S"""
foo do
~S(foo\
bar)
end
"""
assert_same ~S"""
foo do
~s(#{bar}
)
end
"""
end
test "with interpolation" do
assert_same ~S[~s(one #{2} three)]
end
test "with modifiers" do
assert_same ~S[~w(one two three)a]
assert_same ~S[~z(one two three)foo]
end
test "with heredoc syntax" do
assert_same ~S"""
~s'''
one\a
#{:two}\r
three\0
'''
"""
assert_same ~S'''
~s"""
one\a
#{:two}\r
three\0
"""
'''
end
test "with heredoc syntax and modifier" do
assert_same ~S"""
~s'''
foo
'''rsa
"""
end
end
describe "preserves comments formatting" do
test "before and after expressions" do
assert_same """
# before comment
:hello
"""
assert_same """
:hello
# after comment
"""
assert_same """
# before comment
:hello
# after comment
"""
end
test "empty comment" do
assert_same """
#
:foo
"""
end
test "handles comments with unescaped literal" do
assert_same """
# before
Mix.install([:foo])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
assert_same """
# before
Mix.install([1 + 2, :foo])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
assert_same """
# before
Mix.install([:foo, 1 + 2])
# after
""",
literal_encoder: fn literal, _ -> {:ok, literal} end
end
test "before and after expressions with newlines" do
assert_same """
# before comment
# second line
:hello
# middle comment 1
#
# middle comment 2
:world
# after comment
# second line
"""
end
test "interpolation with comment outside before and after" do
assert_same ~S"""
# comment
IO.puts("Hello #{world}")
"""
assert_same ~S"""
IO.puts("Hello #{world}")
# comment
"""
end
test "blocks with keyword list" do
assert_same ~S"""
defp sample do
[
# comment
{:a, "~> 1.2"}
]
end
"""
assert_same ~S"""
defp sample do
[
# comment
{:a, "~> 1.2"},
{:b, "~> 1.2"}
]
end
"""
end
end
end
| 18.117002 | 83 | 0.496468 |
ff11cc8093fda668b99fab135190e56d4ab50807 | 61 | ex | Elixir | web/views/page_view.ex | mfunaro/handiman-phoenix | 5d94114f37561647096f1695e3f0a3a3d1c7ca34 | [
"MIT",
"Unlicense"
] | 1 | 2015-08-15T02:40:27.000Z | 2015-08-15T02:40:27.000Z | web/views/page_view.ex | mfunaro/handiman-phoenix | 5d94114f37561647096f1695e3f0a3a3d1c7ca34 | [
"MIT",
"Unlicense"
] | 1 | 2015-09-11T00:02:33.000Z | 2015-09-11T03:24:21.000Z | web/views/page_view.ex | mfunaro/handiman-phoenix | 5d94114f37561647096f1695e3f0a3a3d1c7ca34 | [
"MIT",
"Unlicense"
] | null | null | null | defmodule Handiman.PageView do
use Handiman.Web, :view
end
| 15.25 | 30 | 0.786885 |
ff11e6e6b5a6e746e24ccafc055e414e2a87dbcd | 74 | exs | Elixir | test/test_helper.exs | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | test/test_helper.exs | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | test/test_helper.exs | necosta/raspberry-pi-x | 478f49de74b03d4a652c35ef185735ca0f2f6878 | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(RaspberryPi.Repo, :manual)
| 18.5 | 57 | 0.783784 |
ff11e99aaf57161024e1fc76a5477c2608ee889f | 1,101 | ex | Elixir | lib/ddstest.ex | b5g-ex/bdds | 049677b7e02df7d8c084de716698bdc72a840e0d | [
"Apache-2.0"
] | null | null | null | lib/ddstest.ex | b5g-ex/bdds | 049677b7e02df7d8c084de716698bdc72a840e0d | [
"Apache-2.0"
] | 1 | 2022-02-19T12:14:55.000Z | 2022-02-27T04:23:37.000Z | lib/ddstest.ex | b5g-ex/bdds | 049677b7e02df7d8c084de716698bdc72a840e0d | [
"Apache-2.0"
] | null | null | null | defmodule Ddstest do
@moduledoc """
Documentation for `Ddstest`.
"""
# defstruct handle: nil
@doc """
Hello world.
## Examples
iex> Ddstest.hello()
:world
"""
def hello do
:world
end
def create_publisher() do
Ddstest.ddstest_create_publisher()
end
def create_subscriber() do
Ddstest.ddstest_create_subscriber()
end
def delete() do
Ddstest.ddstest_delete()
end
def sendmsg(msg) do
Ddstest.ddstest_sendmsg(msg)
end
def test() do
Ddstest.ddstest_test()
end
# @on_load :load_nif
def load_nif do
nif_file = Application.app_dir(:bdds, "priv/ddstest_nif")
:erlang.load_nif(nif_file, 0)
end
def ddstest_create_publisher(), do: raise("NIF ddstest_create_publisher/0 not implemented")
def ddstest_create_subscriber(), do: raise("NIF ddstest_create_subscriber/0 not implemented")
def ddstest_sendmsg(_a), do: raise("NIF ddstest_sendmsg/1 not implemented")
def ddstest_delete(), do: raise("NIF ddstest_delete/0 not implemented")
def ddstest_test(), do: raise("NIF ddstest_test/0 not implemented")
end
| 20.773585 | 95 | 0.701181 |
ff12018c43d8548b50b69169dab9356fdf734f38 | 1,116 | ex | Elixir | lib/koans/20_comprehensions.ex | bernardini687/elixir-koans | db9f15c4bb3237eab509e9bb1a53b3550609875c | [
"MIT"
] | 1 | 2020-11-23T17:01:37.000Z | 2020-11-23T17:01:37.000Z | lib/koans/20_comprehensions.ex | bernardini687/elixir-koans | db9f15c4bb3237eab509e9bb1a53b3550609875c | [
"MIT"
] | null | null | null | lib/koans/20_comprehensions.ex | bernardini687/elixir-koans | db9f15c4bb3237eab509e9bb1a53b3550609875c | [
"MIT"
] | null | null | null | defmodule Comprehensions do
use Koans
@intro "A comprehension is made of three parts: generators, filters, and collectibles. We will look at how these interact with eachother"
koan "The generator, `n <- [1, 2, 3, 4]`, is providing the values for our comprehension" do
assert (for n <- [1, 2, 3, 4], do: n * n) == ___
end
koan "Any enumerable can be a generator" do
assert (for n <- 1..4, do: n * n) == ___
end
koan "A generator specifies how to extract values from a collection" do
collection = [["Hello","World"], ["Apple", "Pie"]]
assert (for [a, b] <- collection, do: "#{a} #{b}") == ___
end
koan "You can use multiple generators at once" do
assert (for x <- ["little", "big"], y <- ["dogs", "cats"], do: "#{x} #{y}") == ___
end
koan "Use a filter to reduce your work" do
assert (for n <- [1, 2, 3, 4, 5, 6], n > 3, do: n) == ___
end
koan "Add the result of a comprehension to an existing collection" do
collection = ["Apple Pie"]
collection = for x <- ["Pecan", "Pumpkin"], into: collection, do: "#{x} Pie"
assert collection == ___
end
end
| 32.823529 | 139 | 0.612007 |
ff121b9d1227ea60fa21f3a59986c1118f9afb24 | 1,577 | ex | Elixir | lib/habex_web.ex | richardpanda/habex | ba7869f11d8c736db3cbedfb326c754a2d79bec8 | [
"MIT"
] | null | null | null | lib/habex_web.ex | richardpanda/habex | ba7869f11d8c736db3cbedfb326c754a2d79bec8 | [
"MIT"
] | null | null | null | lib/habex_web.ex | richardpanda/habex | ba7869f11d8c736db3cbedfb326c754a2d79bec8 | [
"MIT"
] | null | null | null | defmodule HabexWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use HabexWeb, :controller
use HabexWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: HabexWeb
import Plug.Conn
import HabexWeb.Router.Helpers
import HabexWeb.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "lib/habex_web/templates",
namespace: HabexWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import HabexWeb.Router.Helpers
import HabexWeb.ErrorHelpers
import HabexWeb.Gettext
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import HabexWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.191176 | 69 | 0.681674 |
ff12334c22be3a3d3016e05c2daa69a01c2665f8 | 1,456 | exs | Elixir | test/ex_check/project_cases/application_mod_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 225 | 2019-07-21T14:44:17.000Z | 2022-03-31T11:08:07.000Z | test/ex_check/project_cases/application_mod_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 23 | 2019-07-30T03:05:42.000Z | 2022-03-06T18:11:50.000Z | test/ex_check/project_cases/application_mod_test.exs | gerbal/ex_check | 1247075a64d17f69c3e6e9699bd95664cc128466 | [
"MIT"
] | 9 | 2019-11-23T23:04:39.000Z | 2022-03-29T00:54:34.000Z | defmodule ExCheck.ProjectCases.ApplicationModTest do
use ExCheck.ProjectCase, async: true
@application """
defmodule TestProject.Application do
use Application
def start(_type, _args) do
children = []
opts = [strategy: :one_for_one, name: ExCheck.Supervisor]
if Mix.env() == :test do
Application.put_env(:test_project, :app_started, true, persistent: true)
else
raise("running app!")
end
Supervisor.start_link(children, opts)
end
def do_sth_with_running_app do
unless Application.get_env(:test_project, :app_started), do: raise("not running app!")
:ok
end
end
"""
@application_test """
defmodule TestProject.ApplicationTest do
use ExUnit.Case
test "app running" do
assert :ok = TestProject.Application.do_sth_with_running_app()
end
end
"""
test "application mod", %{project_dir: project_dir} do
set_mix_app_mod(project_dir, "TestProject.Application")
application_path = Path.join([project_dir, "lib", "application.ex"])
File.write!(application_path, @application)
application_test_path = Path.join([project_dir, "test", "application_test.exs"])
File.write!(application_test_path, @application_test)
assert {output, 0} = System.cmd("mix", ~w[check], cd: project_dir)
assert output =~ "compiler success"
assert output =~ "formatter success"
assert output =~ "ex_unit success"
end
end
| 26 | 92 | 0.686126 |
ff128003f16734537f92b4818e403de4aa5bfe95 | 1,320 | exs | Elixir | test/telemetry_filter_test.exs | akoutmos/plug_telemetry_filter | 98533b1185254a78124826313b83fa7fdde30ede | [
"MIT"
] | 2 | 2019-12-25T21:26:07.000Z | 2019-12-26T01:42:02.000Z | test/telemetry_filter_test.exs | akoutmos/telemetry_filter | 98533b1185254a78124826313b83fa7fdde30ede | [
"MIT"
] | null | null | null | test/telemetry_filter_test.exs | akoutmos/telemetry_filter | 98533b1185254a78124826313b83fa7fdde30ede | [
"MIT"
] | null | null | null | defmodule TelemetryFilterTest do
use ExUnit.Case
alias Plug.Conn
describe "TelemetryFilter" do
test "should attach before_send callback for Plug.Telemetry for non filtered routes" do
conn = %Conn{request_path: "/users/12345"}
filter_plug_opts =
TelemetryFilter.init(filter_endpoints: ["/metrics", "/health"], event_prefix: [:phoenix, :endpoint])
[before_send] = TelemetryFilter.call(conn, filter_plug_opts).before_send
assert Function.info(before_send, :module) == {:module, Plug.Telemetry}
end
test "should attach before_send callback for Plug.Telemetry when no configured filters are provided" do
conn = %Conn{request_path: "/users/12345"}
filter_plug_opts = TelemetryFilter.init(event_prefix: [:phoenix, :endpoint])
[before_send] = TelemetryFilter.call(conn, filter_plug_opts).before_send
assert Function.info(before_send, :module) == {:module, Plug.Telemetry}
end
test "should not attach before_send callback for Plug.Telemetry for filtered routes" do
conn = %Conn{request_path: "/metrics"}
filter_plug_opts =
TelemetryFilter.init(filter_endpoints: ["/metrics", "/health"], event_prefix: [:phoenix, :endpoint])
assert [] == TelemetryFilter.call(conn, filter_plug_opts).before_send
end
end
end
| 37.714286 | 108 | 0.716667 |
ff128c8d0ac5e3eb04b767877823ad5c0743daa0 | 4,419 | exs | Elixir | lib/elixir/test/elixir/kernel/tracers_test.exs | felipelincoln/elixir | 6724c1d1819f2926dac561980b4beab281bbd3c2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/tracers_test.exs | felipelincoln/elixir | 6724c1d1819f2926dac561980b4beab281bbd3c2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/tracers_test.exs | felipelincoln/elixir | 6724c1d1819f2926dac561980b4beab281bbd3c2 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.TracersTest do
use ExUnit.Case
import Code, only: [compile_string: 1]
def trace(event, %Macro.Env{} = env) do
send(self(), {event, env})
:ok
end
setup_all do
Code.put_compiler_option(:tracers, [__MODULE__])
Code.put_compiler_option(:parser_options, columns: true)
on_exit(fn ->
Code.put_compiler_option(:tracers, [])
Code.put_compiler_option(:parser_options, [])
end)
end
test "traces start and stop" do
compile_string("""
Foo
""")
assert_receive {:start, %{lexical_tracker: pid}} when is_pid(pid)
assert_receive {:stop, %{lexical_tracker: pid}} when is_pid(pid)
end
test "traces alias references" do
compile_string("""
Foo
""")
assert_receive {{:alias_reference, meta, Foo}, _}
assert meta[:line] == 1
assert meta[:column] == 1
end
test "traces aliases" do
compile_string("""
alias Hello.World
World
alias Foo, as: Bar, warn: true
Bar
""")
assert_receive {{:alias, meta, Hello.World, World, []}, _}
assert meta[:line] == 1
assert meta[:column] == 1
assert_receive {{:alias_expansion, meta, World, Hello.World}, _}
assert meta[:line] == 2
assert meta[:column] == 1
assert_receive {{:alias, meta, Foo, Bar, [as: Bar, warn: true]}, _}
assert meta[:line] == 4
assert meta[:column] == 1
assert_receive {{:alias_expansion, meta, Bar, Foo}, _}
assert meta[:line] == 5
assert meta[:column] == 1
end
test "traces imports" do
compile_string("""
import Integer, only: [is_odd: 1, parse: 1]
true = is_odd(1)
{1, ""} = parse("1")
""")
assert_receive {{:import, meta, Integer, only: [is_odd: 1, parse: 1]}, _}
assert meta[:line] == 1
assert meta[:column] == 1
assert_receive {{:imported_macro, meta, Integer, :is_odd, 1}, _}
assert meta[:line] == 2
assert meta[:column] == 8
assert_receive {{:imported_function, meta, Integer, :parse, 1}, _}
assert meta[:line] == 3
assert meta[:column] == 11
end
test "traces structs" do
compile_string("""
%URI{path: "/"}
""")
assert_receive {{:struct_expansion, meta, URI, [:path]}, _}
assert meta[:line] == 1
assert meta[:column] == 1
end
test "traces remote" do
compile_string("""
require Integer
true = Integer.is_odd(1)
{1, ""} = Integer.parse("1")
""")
assert_receive {{:remote_macro, meta, Integer, :is_odd, 1}, _}
assert meta[:line] == 2
assert meta[:column] == 16
assert_receive {{:remote_function, meta, Integer, :parse, 1}, _}
assert meta[:line] == 3
assert meta[:column] == 19
end
test "traces remote via captures" do
compile_string("""
require Integer
&Integer.is_odd/1
&Integer.parse/1
""")
assert_receive {{:remote_macro, meta, Integer, :is_odd, 1}, _}
assert meta[:line] == 2
assert meta[:column] == 1
assert_receive {{:remote_function, meta, Integer, :parse, 1}, _}
assert meta[:line] == 3
assert meta[:column] == 1
end
test "traces locals" do
compile_string("""
defmodule Sample do
defmacro foo(arg), do: arg
def bar(arg), do: arg
def baz(arg), do: foo(arg) + bar(arg)
end
""")
assert_receive {{:defmodule, _meta}, %{module: Sample}}
assert_receive {{:local_macro, meta, :foo, 1}, _}
assert meta[:line] == 4
assert meta[:column] == 21
assert_receive {{:local_function, meta, :bar, 1}, _}
assert meta[:line] == 4
assert meta[:column] == 32
after
:code.purge(Sample)
:code.delete(Sample)
end
test "traces locals with capture" do
compile_string("""
defmodule Sample do
defmacro foo(arg), do: arg
def bar(arg), do: arg
def baz(_), do: {&foo/1, &bar/1}
end
""")
assert_receive {{:local_macro, meta, :foo, 1}, _}
assert meta[:line] == 4
assert meta[:column] == 20
assert_receive {{:local_function, meta, :bar, 1}, _}
assert meta[:line] == 4
assert meta[:column] == 28
after
:code.purge(Sample)
:code.delete(Sample)
end
test "traces modules" do
compile_string("""
defmodule Sample do
:ok
end
""")
assert_receive {{:on_module, <<_::binary>>, :none}, %{module: Sample, function: nil}}
after
:code.purge(Sample)
:code.delete(Sample)
end
end
| 23.886486 | 89 | 0.603078 |
ff129b10e4acd5761f9840c302f97f622319c36e | 27,432 | exs | Elixir | test/hexpm_web/controllers/dashboard/organization_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | test/hexpm_web/controllers/dashboard/organization_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | test/hexpm_web/controllers/dashboard/organization_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule HexpmWeb.Dashboard.OrganizationControllerTest do
use HexpmWeb.ConnCase, async: true
use Bamboo.Test
alias Hexpm.Accounts.{Organizations, Users, AuditLogs}
defp add_email(user, email) do
{:ok, user} = Users.add_email(user, %{email: email}, audit: audit_data(user))
user
end
defp mock_customer(organization) do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
%{
"checkout_html" => "",
"invoices" => []
}
end)
end
setup do
repository = insert(:repository)
%{
user: insert(:user),
organization: repository.organization
}
end
test "show organization", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user)
mock_customer(organization)
conn =
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{organization.name}")
assert response(conn, 200) =~ "Members"
end
test "show organization without associated user", %{user: user} do
repository = insert(:repository, organization: build(:organization, user: nil))
insert(:organization_user, organization: repository.organization, user: user)
mock_customer(repository.organization)
conn =
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{repository.organization.name}")
assert response(conn, 200) =~ "Members"
end
test "requires login" do
conn = get(build_conn(), "dashboard/orgs")
assert redirected_to(conn) == "/login?return=dashboard%2Forgs"
end
test "show organization authenticates", %{user: user, organization: organization} do
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{organization.name}")
|> response(404)
end
test "add member to organization", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
%{
"checkout_html" => "",
"invoices" => [],
"quantity" => 2
}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
new_user = insert(:user)
add_email(new_user, "new@mail.com")
params = %{"username" => new_user.username, role: "write"}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}", %{
"action" => "add_member",
"organization_user" => params
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert repo_user = Repo.get_by(assoc(organization, :organization_users), user_id: new_user.id)
assert repo_user.role == "write"
assert_delivered_email(Hexpm.Emails.organization_invite(organization, new_user))
end
test "add member to organization without enough seats", %{
user: user,
organization: organization
} do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
%{
"checkout_html" => "",
"invoices" => [],
"quantity" => 1,
"subscription" => %{
"current_period_end" => "2017-12-12T00:00:00Z",
"status" => "active",
"cancel_at_period_end" => false
},
"plan_id" => "organization-monthly",
"amount_with_tax" => 700,
"proration_amount" => 0
}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
new_user = insert(:user)
add_email(new_user, "new@mail.com")
params = %{"username" => new_user.username, role: "write"}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}", %{
"action" => "add_member",
"organization_user" => params
})
response(conn, 400)
assert get_flash(conn, :error) == "Not enough seats in organization to add member."
end
test "remove member from organization", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user, role: "admin")
new_user = insert(:user)
insert(:organization_user, organization: organization, user: new_user)
params = %{"username" => new_user.username}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}", %{
"action" => "remove_member",
"organization_user" => params
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
refute Repo.get_by(assoc(organization, :organization_users), user_id: new_user.id)
end
test "change role of member in organization", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user, role: "admin")
new_user = insert(:user)
insert(:organization_user, organization: organization, user: new_user, role: "write")
params = %{"username" => new_user.username, "role" => "read"}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}", %{
"action" => "change_role",
"organization_user" => params
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert repo_user = Repo.get_by(assoc(organization, :organization_users), user_id: new_user.id)
assert repo_user.role == "read"
end
test "leave organization", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user, role: "admin")
new_user = insert(:user)
insert(:organization_user, organization: organization, user: new_user, role: "admin")
params = %{"organization_name" => organization.name}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/leave", params)
assert redirected_to(conn) == "/dashboard/profile"
refute Repo.get_by(assoc(organization, :organization_users), user_id: user.id)
end
describe "update payment method" do
test "calls Hexpm.Billing.checkout/2 when user is admin", %{
user: user,
organization: organization
} do
insert(:organization_user, organization: organization, user: user, role: "admin")
Mox.expect(Hexpm.Billing.Mock, :checkout, fn organization_name, params ->
assert organization_name == organization.name
assert params == %{payment_source: "Test Token"}
{:ok, :whatever}
end)
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/billing-token", %{"token" => "Test Token"})
assert json_response(conn, :ok) == %{}
end
test "create audit_log with action billing.checkout", %{
user: user,
organization: organization
} do
insert(:organization_user, organization: organization, user: user, role: "admin")
Mox.expect(Hexpm.Billing.Mock, :checkout, fn _, _ -> {:ok, :whatever} end)
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/billing-token", %{"token" => "Test Token"})
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.checkout"
assert audit_log.params["organization"]["name"] == organization.name
assert audit_log.params["payment_source"] == "Test Token"
end
end
describe "show billing section" do
test "show for admins", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user, role: "admin")
mock_customer(organization)
conn =
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{organization.name}")
assert response(conn, 200) =~ "Billing"
assert response(conn, 200) =~ "Billing information"
end
test "hide for non-admins", %{user: user, organization: organization} do
insert(:organization_user, organization: organization, user: user, role: "read")
mock_customer(organization)
conn =
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{organization.name}")
refute response(conn, 200) =~ "Billing"
refute response(conn, 200) =~ "Billing information"
end
end
describe "cancel billing" do
test "with subscription", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :cancel, fn token ->
assert organization.name == token
%{
"subscription" => %{
"cancel_at_period_end" => true,
"current_period_end" => "2017-12-12T00:00:00Z"
}
}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/cancel-billing")
message =
"Your subscription is cancelled, you will have access to the organization until " <>
"the end of your billing period at December 12, 2017"
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == message
end
# This can happen when the subscription is cancelled before the trial is over
test "without subscription", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :cancel, fn token ->
assert organization.name == token
%{}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/cancel-billing")
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "Your subscription is cancelled"
end
test "create audit_log with action billing.cancel", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :cancel, fn token ->
assert organization.name == token
%{
"subscription" => %{
"cancel_at_period_end" => true,
"current_period_end" => "2017-12-12T00:00:00Z"
}
}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/cancel-billing")
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.cancel"
assert audit_log.params["organization"]["name"] == organization.name
end
end
test "show invoice", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
%{"invoices" => [%{"id" => 123}]}
end)
Mox.stub(Hexpm.Billing.Mock, :invoice, fn id ->
assert id == 123
"Invoice"
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> get("dashboard/orgs/#{organization.name}/invoices/123")
assert response(conn, 200) == "Invoice"
end
describe "pay invoice" do
test "pay invoice succeed", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
invoice = %{
"id" => 123,
"date" => "2020-01-01T00:00:00Z",
"amount_due" => 700,
"paid" => true
}
%{"invoices" => [invoice]}
end)
Mox.stub(Hexpm.Billing.Mock, :pay_invoice, fn id ->
assert id == 123
:ok
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/invoices/123/pay")
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "Invoice paid."
end
test "pay invoice failed", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :get, fn token ->
assert organization.name == token
invoice = %{
"id" => 123,
"date" => "2020-01-01T00:00:00Z",
"amount_due" => 700,
"paid" => true
}
%{"invoices" => [invoice], "checkout_html" => ""}
end)
Mox.stub(Hexpm.Billing.Mock, :pay_invoice, fn id ->
assert id == 123
{:error, %{"errors" => "Card failure"}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/invoices/123/pay")
response(conn, 400)
assert get_flash(conn, :error) == "Failed to pay invoice: Card failure."
end
test "create audit_log with action billing.pay_invoice", %{
user: user,
organization: organization
} do
Mox.stub(Hexpm.Billing.Mock, :get, fn _token -> %{"invoices" => [%{"id" => 123}]} end)
Mox.stub(Hexpm.Billing.Mock, :pay_invoice, fn _id -> :ok end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/invoices/123/pay")
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.pay_invoice"
assert audit_log.params["invoice_id"] == 123
assert audit_log.params["organization"]["name"] == organization.name
end
end
describe "POST /dashboard/orgs/:dashboard_org/update-billing" do
test "update billing email", %{user: user, organization: organization} do
mock_customer(organization)
Mox.stub(Hexpm.Billing.Mock, :update, fn token, params ->
assert organization.name == token
assert %{"email" => "billing@example.com"} = params
{:ok, %{}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/update-billing", %{
"email" => "billing@example.com"
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "Updated your billing information."
end
test "create audit_log with action billing.update", %{user: user, organization: organization} do
mock_customer(organization)
Mox.stub(Hexpm.Billing.Mock, :update, fn _, _ -> {:ok, %{}} end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/update-billing", %{
"email" => "billing@example.com"
})
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.update"
assert audit_log.params["email"] == "billing@example.com"
assert audit_log.params["organization"]["name"] == organization.name
end
end
test "create organization", %{user: user} do
Mox.stub(Hexpm.Billing.Mock, :create, fn params ->
assert params == %{
"person" => %{"country" => "SE"},
"token" => "createrepo",
"company" => nil,
"email" => "eric@mail.com",
"quantity" => 1
}
{:ok, %{}}
end)
params = %{
"organization" => %{"name" => "createrepo"},
"person" => %{"country" => "SE"},
"email" => "eric@mail.com"
}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs", params)
response(conn, 302)
assert get_resp_header(conn, "location") == ["/dashboard/orgs/createrepo"]
assert get_flash(conn, :info) ==
"Organization created with one month free trial period active."
assert organization = Organizations.get("createrepo", [:repository])
assert organization.repository.name == "createrepo"
end
test "create organization validates name", %{user: user} do
insert(:organization, name: "createrepovalidates")
params = %{
"organization" => %{"name" => "createrepovalidates"},
"person" => %{"country" => "SE"},
"email" => "eric@mail.com"
}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs", params)
assert response(conn, 400) =~ "Oops, something went wrong"
assert response(conn, 400) =~ "has already been taken"
end
describe "POST /dashboard/orgs/:dashboard_org/create-billing" do
test "create billing customer after organization", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :create, fn params ->
assert params == %{
"person" => %{"country" => "SE"},
"token" => organization.name,
"company" => nil,
"email" => "eric@mail.com",
"quantity" => 1
}
{:ok, %{}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
params = %{
"organization" => %{"name" => organization.name},
"person" => %{"country" => "SE"},
"email" => "eric@mail.com"
}
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/create-billing", params)
response(conn, 302)
assert get_resp_header(conn, "location") == ["/dashboard/orgs/#{organization.name}"]
assert get_flash(conn, :info) == "Updated your billing information."
end
test "create audit_log with action billing.create", %{user: user, organization: organization} do
Mox.stub(Hexpm.Billing.Mock, :create, fn _ -> {:ok, %{}} end)
insert(:organization_user, organization: organization, user: user, role: "admin")
params = %{"company" => nil, "person" => nil}
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/create-billing", params)
assert [%{action: "billing.create"} = audit_log] = AuditLogs.all_by(user)
assert audit_log.params["organization"]["name"] == organization.name
end
end
describe "POST /dashboard/orgs/:dashboard_org/add-seats" do
test "increase number of seats", %{organization: organization, user: user} do
Mox.stub(Hexpm.Billing.Mock, :update, fn organization_name, map ->
assert organization_name == organization.name
assert map == %{"quantity" => 3}
{:ok, %{}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/add-seats", %{
"current-seats" => "1",
"add-seats" => "2"
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "The number of open seats have been increased."
end
test "seats cannot be less than number of members", %{organization: organization, user: user} do
mock_customer(organization)
insert(:organization_user, organization: organization, user: user, role: "admin")
insert(:organization_user, organization: organization, user: build(:user))
insert(:organization_user, organization: organization, user: build(:user))
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/add-seats", %{
"current-seats" => "1",
"add-seats" => "1"
})
response(conn, 400)
assert get_flash(conn, :error) ==
"The number of open seats cannot be less than the number of organization members."
end
test "create audit_log with action billing.update", %{organization: organization, user: user} do
Mox.stub(Hexpm.Billing.Mock, :update, fn _organization_name, _map -> {:ok, %{}} end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/add-seats", %{
"current-seats" => "1",
"add-seats" => "1"
})
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.update"
assert audit_log.params["quantity"] == 2
assert audit_log.params["organization"]["name"] == organization.name
end
end
describe "POST /dashboard/orgs/:dashboard_org/remove-seats" do
test "increase number of seats", %{organization: organization, user: user} do
Mox.stub(Hexpm.Billing.Mock, :update, fn organization_name, map ->
assert organization_name == organization.name
assert map == %{"quantity" => 3}
{:ok, %{}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/remove-seats", %{
"seats" => "3"
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "The number of open seats have been reduced."
end
test "seats cannot be less than number of members", %{organization: organization, user: user} do
mock_customer(organization)
insert(:organization_user, organization: organization, user: build(:user))
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/remove-seats", %{
"seats" => "1"
})
response(conn, 400)
assert get_flash(conn, :error) ==
"The number of open seats cannot be less than the number of organization members."
end
test "create audit_log with action billing.update", %{organization: organization, user: user} do
Mox.stub(Hexpm.Billing.Mock, :update, fn _organization_name, _map -> {:ok, %{}} end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/remove-seats", %{
"seats" => "4"
})
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.update"
assert audit_log.params["quantity"] == 4
assert audit_log.params["organization"]["name"] == organization.name
end
end
describe "POST /dashboard/orgs/:dashboard_org/change-plan" do
test "change plan", %{organization: organization, user: user} do
Mox.stub(Hexpm.Billing.Mock, :change_plan, fn organization_name, map ->
assert organization_name == organization.name
assert map == %{"plan_id" => "organization-annually"}
{:ok, %{}}
end)
insert(:organization_user, organization: organization, user: user, role: "admin")
conn =
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/change-plan", %{
"plan_id" => "organization-annually"
})
assert redirected_to(conn) == "/dashboard/orgs/#{organization.name}"
assert get_flash(conn, :info) == "You have switched to the annual organization plan."
end
test "create audit_log with action billing.change_plan", %{
organization: organization,
user: user
} do
Mox.stub(Hexpm.Billing.Mock, :change_plan, fn _organization_name, _map -> {:ok, %{}} end)
insert(:organization_user, organization: organization, user: user, role: "admin")
build_conn()
|> test_login(user)
|> post("dashboard/orgs/#{organization.name}/change-plan", %{
"plan_id" => "organization-annually"
})
assert [audit_log] = AuditLogs.all_by(user)
assert audit_log.action == "billing.change_plan"
assert audit_log.params["organization"]["name"] == organization.name
assert audit_log.params["plan_id"] == "organization-annually"
end
end
describe "POST /dashboard/orgs/:dashboard_org/keys" do
test "generate a new key", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "admin")
conn =
build_conn()
|> test_login(c.user)
|> post("/dashboard/orgs/#{c.organization.name}/keys", %{key: %{name: "computer"}})
assert redirected_to(conn) == "/dashboard/orgs/#{c.organization.name}"
assert get_flash(conn, :info) =~ "The key computer was successfully generated"
end
end
describe "DELETE /dashboard/orgs/:dashboard_org/keys" do
test "revoke key", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "admin")
insert(:key, organization: c.organization, name: "computer")
mock_customer(c.organization)
conn =
build_conn()
|> test_login(c.user)
|> delete("/dashboard/orgs/#{c.organization.name}/keys", %{name: "computer"})
assert redirected_to(conn) == "/dashboard/orgs/#{c.organization.name}"
assert get_flash(conn, :info) =~ "The key computer was revoked successfully"
end
test "revoking an already revoked key throws an error", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "admin")
insert(
:key,
organization: c.organization,
name: "computer",
revoked_at: ~N"2017-01-01 00:00:00"
)
mock_customer(c.organization)
conn =
build_conn()
|> test_login(c.user)
|> delete("/dashboard/orgs/#{c.organization.name}/keys", %{name: "computer"})
assert response(conn, 400) =~ "The key computer was not found"
end
end
describe "POST /dashboard/orgs/:dashboard_org/profile" do
test "requires admin role", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "write")
mock_customer(c.organization)
conn =
build_conn()
|> test_login(c.user)
|> post("/dashboard/orgs/#{c.organization.name}/profile", %{profile: %{}})
assert response(conn, 400) =~ "You do not have permission for this action."
end
test "when update succeeds", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "admin")
mock_customer(c.organization)
conn =
build_conn()
|> test_login(c.user)
|> post("/dashboard/orgs/#{c.organization.name}/profile", %{profile: %{}})
assert redirected_to(conn) == "/dashboard/orgs/#{c.organization.name}"
assert get_flash(conn, :info) == "Profile updated successfully."
end
test "when update fails", c do
insert(:organization_user, organization: c.organization, user: c.user, role: "admin")
mock_customer(c.organization)
conn =
build_conn()
|> test_login(c.user)
|> post("/dashboard/orgs/#{c.organization.name}/profile", %{
profile: %{public_email: "invalid_email"}
})
assert get_flash(conn, :error) == "Oops, something went wrong!"
assert response(conn, 400)
end
end
end
| 32.971154 | 100 | 0.620917 |
ff129dd030449dfef79d47adc936f57d807b1871 | 10,418 | ex | Elixir | lib/currency.ex | jessejanderson/monetized | b48b7aeea9535a0bec3b6804124d0ee963265d51 | [
"MIT"
] | null | null | null | lib/currency.ex | jessejanderson/monetized | b48b7aeea9535a0bec3b6804124d0ee963265d51 | [
"MIT"
] | null | null | null | lib/currency.ex | jessejanderson/monetized | b48b7aeea9535a0bec3b6804124d0ee963265d51 | [
"MIT"
] | null | null | null | defmodule Monetized.Currency do
@currencies [
%{key: "ARS", name: "Argentinian Peso", symbol: "A$", type: "Fiat"},
%{key: "CAD", name: "Canadian Dollar", symbol: "C$", type: "Fiat"},
%{key: "EUR", name: "Euro", symbol: "€", type: "Fiat"},
%{key: "GBP", name: "Pound Sterling", symbol: "£", type: "Fiat"},
%{key: "HKD", name: "Hong Kong Dollar", symbol: "HK$", type: "Fiat"},
%{key: "PHP", name: "Philippine Peso", symbol: "₱", type: "Fiat"},
%{key: "THB", name: "Thai Baht", symbol: "฿", type: "Fiat"},
%{key: "USD", name: "US Dollar", symbol: "$", type: "Fiat"},
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"},
%{key: "ETH", name: "Ethereum", symbol: "Ξ", type: "Crypto"},
%{key: "XRP", name: "Ripple", type: "Crypto"},
%{key: "BCH", name: "Bitcoin Cash", type: "Crypto"},
%{key: "LTC", name: "Litecoin", symbol: "Ł", type: "Crypto"},
%{key: "ADA", name: "Cardano", type: "Crypto"},
%{key: "NEO", name: "NEO", type: "Crypto"},
%{key: "XLM", name: "Stellar", type: "Crypto"},
%{key: "MIOTA", name: "IOTA", type: "Crypto"},
%{key: "DASH", name: "Dash", type: "Crypto"},
%{key: "XMR", name: "Monero", type: "Crypto"},
%{key: "XEM", name: "NEM", type: "Crypto"},
%{key: "ETC", name: "Ethereum Classic", type: "Crypto"},
%{key: "LSK", name: "Lisk", type: "Crypto"},
%{key: "QTUM", name: "Qtum", type: "Crypto"},
%{key: "BTG", name: "Bitcoin Gold", type: "Crypto"},
%{key: "ZEC", name: "Zcash", type: "Crypto"},
%{key: "XVG", name: "Verge", type: "Crypto"},
%{key: "NANO", name: "Nano", type: "Crypto"},
%{key: "STEEM", name: "Steem", type: "Crypto"},
%{key: "BCN", name: "Bytecoin", type: "Crypto"},
%{key: "STRAT", name: "Stratis", type: "Crypto"},
%{key: "SC", name: "Siacoin", type: "Crypto"},
%{key: "WAVES", name: "Waves", type: "Crypto"},
%{key: "DOGE", name: "Dogecoin", type: "Crypto"},
%{key: "BTS", name: "BitShares", type: "Crypto"},
%{key: "ZCL", name: "ZClassic", type: "Crypto"},
%{key: "DCR", name: "Decred", type: "Crypto"},
%{key: "HSR", name: "Hshare", type: "Crypto"},
%{key: "ETN", name: "Electroneum", type: "Crypto"},
%{key: "KMD", name: "Komodo", type: "Crypto"},
%{key: "ARDR", name: "Ardor", type: "Crypto"},
%{key: "ARK", name: "Ark", type: "Crypto"},
%{key: "DGB", name: "DigiByte", type: "Crypto"},
%{key: "GBYTE", name: "Byteball Bytes", type: "Crypto"},
%{key: "SYS", name: "Syscoin", type: "Crypto"},
%{key: "CNX", name: "Cryptonex", type: "Crypto"},
%{key: "MONA", name: "MonaCoin", type: "Crypto"},
%{key: "PIVX", name: "PIVX", type: "Crypto"},
%{key: "BTX", name: "Bitcore", type: "Crypto"},
%{key: "GXS", name: "GXShares", type: "Crypto"},
%{key: "FCT", name: "Factom", type: "Crypto"},
%{key: "XZC", name: "ZCoin", type: "Crypto"},
%{key: "NXT", name: "Nxt", type: "Crypto"},
%{key: "PART", name: "Particl", type: "Crypto"},
%{key: "NEBL", name: "Neblio", type: "Crypto"},
%{key: "RDD", name: "ReddCoin", type: "Crypto"},
%{key: "EMC", name: "Emercoin", type: "Crypto"},
%{key: "SMART", name: "SmartCash", type: "Crypto"},
%{key: "VTC", name: "Vertcoin", type: "Crypto"},
%{key: "GAME", name: "GameCredits", type: "Crypto"},
%{key: "BLOCK", name: "Blocknet", type: "Crypto"},
%{key: "XDN", name: "DigitalNote", type: "Crypto"},
%{key: "XP", name: "Experience Points", type: "Crypto"},
%{key: "BTCD", name: "BitcoinDark", type: "Crypto"},
%{key: "ACT", name: "Achain", type: "Crypto"},
%{key: "NXS", name: "Nexus", type: "Crypto"},
%{key: "BCO", name: "BridgeCoin", type: "Crypto"},
%{key: "SKY", name: "Skycoin", type: "Crypto"},
%{key: "ZEN", name: "ZenCash", type: "Crypto"},
%{key: "NAV", name: "NAV Coin", type: "Crypto"},
%{key: "UBQ", name: "Ubiq", type: "Crypto"},
%{key: "HTML", name: "HTMLCOIN", type: "Crypto"},
%{key: "PPC", name: "Peercoin", type: "Crypto"},
%{key: "XBY", name: "XTRABYTES", type: "Crypto"},
%{key: "XAS", name: "Asch", type: "Crypto"},
%{key: "EMC2", name: "Einsteinium", type: "Crypto"},
%{key: "ETP", name: "Metaverse ETP", type: "Crypto"},
%{key: "VIA", name: "Viacoin", type: "Crypto"},
%{key: "PAC", name: "PACcoin", type: "Crypto"},
%{key: "XCP", name: "Counterparty", type: "Crypto"},
%{key: "PURA", name: "PURA", type: "Crypto"},
%{key: "BAY", name: "BitBay", type: "Crypto"},
%{key: "BUEST", name: "Burst", type: "Crypto"},
%{key: "AEON", name: "Aeon", type: "Crypto"},
%{key: "MNX", name: "MinexCoin", type: "Crypto"},
%{key: "ECC", name: "ECC", type: "Crypto"},
%{key: "NLG", name: "Gulden", type: "Crypto"},
%{key: "LBC", name: "LBRY Credit", type: "Crypto"},
%{key: "CRW", name: "Crown", type: "Crypto"},
%{key: "SLS", name: "SaluS", type: "Crypto"},
%{key: "CLOAK", name: "CloakCoin", type: "Crypto"},
%{key: "ION", name: "ION", type: "Crypto"},
%{key: "RISE", name: "Rise", type: "Crypto"},
%{key: "THC", name: "HempCoin", type: "Crypto"},
%{key: "GRS", name: "Groestlcoin", type: "Crypto"},
%{key: "SBD", name: "Steem Dollars", type: "Crypto"},
%{key: "FTC", name: "Feathercoin", type: "Crypto"},
%{key: "COLX", name: "ColossusCoinXT", type: "Crypto"},
%{key: "ONION", name: "DeepOnion", type: "Crypto"},
%{key: "DCT", name: "DECENT", type: "Crypto"},
%{key: "NMC", name: "Namecoin", type: "Crypto"},
%{key: "DIME", name: "Dimecoin", type: "Crypto"},
%{key: "LKK", name: "Lykke", type: "Crypto"},
%{key: "SIB", name: "SIBCoin", type: "Crypto"},
%{key: "DMD", name: "Diamond", type: "Crypto"},
%{key: "IOC", name: "I/O Coin", type: "Crypto"},
%{key: "POT", name: "PotCoin", type: "Crypto"},
%{key: "XWC", name: "WhiteCoin", type: "Crypto"},
%{key: "ECA", name: "Electra", type: "Crypto"}
]
  # Precompute a key => currency lookup map once at compile time;
  # all/0 exposes it so runtime lookups are O(1) instead of scanning the list.
  @currency_map Enum.reduce(@currencies, %{}, fn currency, acc ->
    Map.put(acc, currency.key, currency)
  end)
@moduledoc """
Defines available currencies and functions to handle them.
"""
@doc """
Attempts to parse the currency from the given string
based on both the currency key and the symbol.
## Examples
iex> Monetized.Currency.parse("EUR 200.00")
%{name: "Euro", symbol: "€", key: "EUR", type: "Fiat"}
iex> Monetized.Currency.parse("£ 200.00")
%{name: "Pound Sterling", symbol: "£", key: "GBP", type: "Fiat"}
iex> Monetized.Currency.parse("200.00 USD")
%{name: "US Dollar", symbol: "$", key: "USD", type: "Fiat"}
iex> Monetized.Currency.parse("200.00 THB")
%{name: "Thai Baht", symbol: "฿", key: "THB", type: "Fiat"}
iex> Monetized.Currency.parse("200.00 PHP")
%{key: "PHP", name: "Philippine Peso", symbol: "₱", type: "Fiat"}
iex> Monetized.Currency.parse("₿ 200.00")
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"}
iex> Monetized.Currency.parse("200.00 BTC")
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"}
"""
def parse(str) do
parse_by_symbol(str) || parse_by_key(str)
end
@doc """
Attempts to parse the currency from the given string
based on the currency key.
## Examples
iex> Monetized.Currency.parse_by_key("EUR 200.00")
%{name: "Euro", symbol: "€", key: "EUR", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 USD")
%{name: "US Dollar", symbol: "$", key: "USD", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 GBP")
%{name: "Pound Sterling", symbol: "£", key: "GBP", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 THB")
%{name: "Thai Baht", symbol: "฿", key: "THB", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 PHP")
%{key: "PHP", name: "Philippine Peso", symbol: "₱", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 ARS")
%{key: "ARS", name: "Argentinian Peso", symbol: "A$", type: "Fiat"}
iex> Monetized.Currency.parse_by_key("200.00 BTC")
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"}
"""
def parse_by_key(str) do
piped = Enum.join(keys(), "|")
case Regex.scan(~r/#{piped}/, str) do
[[a]] ->
get(a)
_ ->
nil
end
end
@doc """
Attempts to guess the currency from the given string
based on the currency symbol.
## Examples
iex> Monetized.Currency.parse_by_symbol("€ 200.00")
%{name: "Euro", symbol: "€", key: "EUR", type: "Fiat"}
iex> Monetized.Currency.parse_by_symbol("$200.00")
%{name: "US Dollar", symbol: "$", key: "USD", type: "Fiat"}
iex> Monetized.Currency.parse_by_symbol("£200.00")
%{name: "Pound Sterling", symbol: "£", key: "GBP", type: "Fiat"}
iex> Monetized.Currency.parse_by_symbol("฿200.00")
%{name: "Thai Baht", symbol: "฿", key: "THB", type: "Fiat"}
iex> Monetized.Currency.parse_by_symbol("₱200.00")
%{key: "PHP", name: "Philippine Peso", symbol: "₱", type: "Fiat"}
iex> Monetized.Currency.parse_by_symbol("₿200.00")
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"}
"""
@currencies
|> Enum.filter(&Map.has_key?(&1, :symbol))
|> Enum.map(&Map.to_list/1)
|> Enum.each(fn currency ->
def parse_by_symbol(unquote(Keyword.get(currency, :symbol)) <> _rest),
do: Enum.into(unquote(currency), %{})
end)
def parse_by_symbol(_), do: nil
@doc """
Retrieves the currency struct for the given key.
## Examples
iex> Monetized.Currency.get("EUR")
%{name: "Euro", symbol: "€", key: "EUR", type: "Fiat"}
iex> Monetized.Currency.get("THB")
%{name: "Thai Baht", symbol: "฿", key: "THB", type: "Fiat"}
iex> Monetized.Currency.get("PHP")
%{key: "PHP", name: "Philippine Peso", symbol: "₱", type: "Fiat"}
iex> Monetized.Currency.get("BTC")
%{key: "BTC", name: "Bitcoin", symbol: "₿", type: "Crypto"}
"""
@spec get(String.t()) :: struct
def get(key), do: all()[key]
@doc """
Retrieves a struct holding all the currency options.
"""
@spec all() :: %{bitstring() => %{name: bitstring(), symbol: bitstring(), key: bitstring()}}
def all, do: @currency_map
@doc """
Retrieves a list of currency options keys
"""
@spec keys :: list
def keys, do: Map.keys(all())
end
| 37.207143 | 94 | 0.550106 |
ff12be9b7849e0ea6ff104ae50a39c99fb444ec6 | 1,347 | exs | Elixir | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/forgot_password_controller_test.exs | fadeojo/mithril | d84ff2d42f895c27c46c0feb09b70ccbac5827ac | [
"MIT"
] | 54 | 2018-01-24T00:22:57.000Z | 2019-01-15T20:03:52.000Z | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/forgot_password_controller_test.exs | infinitered/mithril | 0bbad29f86c63d9a827dcaaf6fed78a176ab90d7 | [
"MIT"
] | 3 | 2018-02-05T18:00:39.000Z | 2018-05-04T00:05:20.000Z | template/$PROJECT_NAME$/apps/$PROJECT_NAME$_web/test/$PROJECT_NAME$_web/controllers/accounts/forgot_password_controller_test.exs | fadeojo/mithril | d84ff2d42f895c27c46c0feb09b70ccbac5827ac | [
"MIT"
] | 3 | 2019-07-12T11:16:42.000Z | 2020-06-08T15:03:36.000Z | defmodule <%= @project_name_camel_case %>Web.Accounts.ForgotPasswordControllerTest do
use <%= @project_name_camel_case %>Web.ConnCase
alias <%= @project_name_camel_case %>.Accounts
describe ".new/2" do
test "renders a form to reset password", %{conn: conn} do
response =
conn
|> get(Routes.forgot_password_path(conn, :new))
|> html_response(200)
assert response =~ "form"
assert response =~ "input id=\"user_email\""
end
end
describe ".create/2" do
setup [:create_user]
test "sends password reset instructions via email if email exists", %{conn: conn} do
params = %{"user" => %{"email" => "test@example.com"}}
conn = post(conn, Routes.forgot_password_path(conn, :create), params)
assert_received {:email, email}
assert email.to == [{"", "test@example.com"}]
assert email.text_body =~ Routes.reset_password_url(conn, :new)
assert email.html_body =~ Routes.reset_password_url(conn, :new)
assert get_flash(conn, :success)
assert redirected_to(conn) == Routes.page_path(conn, :index)
end
end
defp create_user(_) do
{:ok, user} =
Accounts.create_user(%{
email: "test@example.com",
password: "p@$$w0rd",
password_confirmation: "p@$$w0rd"
})
{:ok, [user: user]}
end
end | 30.613636 | 88 | 0.631774 |
ff12c56a72f48abe8620f5600c5e6210e0d7fcf5 | 5,072 | exs | Elixir | test/integration/macro/function_test.exs | fcapovilla/fake_server | 68ee414ed421127f5aaca19c104b7ba01c12d716 | [
"Apache-2.0"
] | null | null | null | test/integration/macro/function_test.exs | fcapovilla/fake_server | 68ee414ed421127f5aaca19c104b7ba01c12d716 | [
"Apache-2.0"
] | null | null | null | test/integration/macro/function_test.exs | fcapovilla/fake_server | 68ee414ed421127f5aaca19c104b7ba01c12d716 | [
"Apache-2.0"
] | null | null | null | defmodule FakeServer.Integration.FunctionTest do
use ExUnit.Case, async: false
import FakeServer
alias FakeServer.Request
alias FakeServer.Response
describe "when using functions as response" do
test_with_server "raises FakeServer.Error if the function arity is not 1" do
assert_raise FakeServer.Error, fn ->
route "/test", fn -> FakeServer.Response.ok! end
end
end
test_with_server "returns the default response if the function does not return a FakeServer.Response object" do
route "/test", fn(_) -> :ok end
response = HTTPoison.get! FakeServer.address <> "/test"
assert response.status_code == 200
assert response.body == ~s<{"message": "This is a default response from FakeServer"}>
end
test_with_server "returns the corresponding response if the function returns a FakeServer.Response object" do
route "/test", fn(_) -> Response.bad_request! end
response = HTTPoison.get! FakeServer.address <> "/test"
assert response.status_code == 400
end
test_with_server "returns the corresponding response if the function returns a {:ok, FakeServer.Response} tuple" do
route "/test", fn(_) -> Response.bad_request end
response = HTTPoison.get! FakeServer.address <> "/test"
assert response.status_code == 400
end
test_with_server "computes hits for the corresponding route" do
route "/test", fn(_) -> Response.bad_request! end
assert hits() == 0
assert hits("/test") == 0
HTTPoison.get! FakeServer.address <> "/test"
assert hits() == 1
assert hits("/test") == 1
end
test_with_server "ensures the request has a body" do
route "/test", fn(%Request{body: body}) ->
if body == "TEST", do: Response.no_content!, else: Response.bad_request!
end
response1 = HTTPoison.post!("#{FakeServer.address}/test", "TEST", [], [])
assert response1.status_code == 204
response2 = HTTPoison.post!("#{FakeServer.address}/test", "NOT_TEST", [], [])
assert response2.status_code == 400
end
test_with_server "decode the body if the content_type header is application/json and the body is decodable" do
route "/test", fn(%Request{body: body}) ->
if body == %{"test" => true}, do: Response.ok!, else: Response.bad_request!
end
response1 = HTTPoison.post!("#{FakeServer.address}/test", ~s<{"test": true}>, ["Content-Type": "application/json"], [])
assert response1.status_code == 200
response2 = HTTPoison.post!("#{FakeServer.address}/test", ~s<not a valid json>, ["Content-Type": "application/json"], [])
assert response2.status_code == 400
end
test_with_server "ensures the request has cookies" do
route "/test", fn(%Request{cookies: cookies}) ->
if Map.get(cookies, "logged_in") == "true", do: Response.ok!, else: Response.forbidden!
end
response1 = HTTPoison.get!("#{FakeServer.address}/test", %{}, hackney: [cookie: ["logged_in=true"]])
assert response1.status_code == 200
response2 = HTTPoison.get!("#{FakeServer.address}/test", %{}, hackney: [cookie: ["logged_in=false"]])
assert response2.status_code == 403
end
test_with_server "ensures the request has headers" do
route "/test", fn(%Request{headers: headers}) ->
if Map.get(headers, "authorization") == "Bearer 1234", do: Response.ok!, else: Response.forbidden!
end
response1 = HTTPoison.get!("#{FakeServer.address}/test", ["Authorization": "Bearer 1234"])
assert response1.status_code == 200
response2 = HTTPoison.get!("#{FakeServer.address}/test")
assert response2.status_code == 403
end
test_with_server "ensures the request has a method" do
route "/test", fn(%Request{method: method}) ->
if method == "GET", do: Response.ok!, else: Response.bad_request!
end
response1 = HTTPoison.get!("#{FakeServer.address}/test")
assert response1.status_code == 200
response2 = HTTPoison.post!("#{FakeServer.address}/test", [], [])
assert response2.status_code == 400
end
test_with_server "ensures the request has a query" do
route "/test", fn(%Request{query: query}) ->
if Map.get(query, "access_token") == "1234", do: Response.ok!, else: Response.forbidden!
end
response1 = HTTPoison.get!("#{FakeServer.address}/test?access_token=1234")
assert response1.status_code == 200
response2 = HTTPoison.get!("#{FakeServer.address}/test", [], [])
assert response2.status_code == 403
end
test_with_server "ensures the request has a query_string" do
route "/test", fn(%Request{query_string: query_string}) ->
if query_string == "access_token=1234", do: Response.ok!, else: Response.forbidden!
end
response1 = HTTPoison.get!("#{FakeServer.address}/test?access_token=1234")
assert response1.status_code == 200
response2 = HTTPoison.get!("#{FakeServer.address}/test", [], [])
assert response2.status_code == 403
end
end
end
| 44.491228 | 127 | 0.660095 |
ff12c8b70a6bffe9c806965eb8eac2dcde057b55 | 7,316 | ex | Elixir | lib/coap/socket_server.ex | OffgridElectric/coap_ex | 7d83f8a7fbf26e6e7f2baa8aa10d0f7c457365b6 | [
"MIT"
] | 16 | 2019-02-23T10:04:26.000Z | 2022-03-14T03:12:45.000Z | lib/coap/socket_server.ex | tpitale/coap_ex | 28b62274336742b41e3426b73294a0e74e6a5b67 | [
"MIT"
] | 51 | 2019-05-06T15:34:58.000Z | 2021-01-21T14:32:32.000Z | lib/coap/socket_server.ex | OffgridElectric/coap_ex | 7d83f8a7fbf26e6e7f2baa8aa10d0f7c457365b6 | [
"MIT"
] | 3 | 2020-06-12T11:47:09.000Z | 2020-12-08T14:59:36.000Z | defmodule CoAP.SocketServer do
@moduledoc """
CoAP.SocketServer holds a reference to a server, or is held by a client.
It contains the UDP port either listening (for a server) or used by a client.
When a new UDP packet is received, the socket_server attempts to look up an
existing connection, or establish a new connection as necessary.
This registry of connections is mapped by a connection_id, a tuple of
`{ip, port, token}` so that subsequent messages exchanged will be routed
to the appropriate `CoAP.Connection`.
A socket_server should generally not be started directly. It will be started
automatically by a `CoAP.Client` or by a server like `CoAP.Phoenix.Listener`.
A socket_server will receive and handle a few messages. `:udp` from the udp socket,
`:deliver` from the `CoAP.Connection` when a message is being sent, and `:DOWN`
when a monitored connection is complete and the process ends.
"""
use GenServer
import CoAP.Util.BinaryFormatter, only: [to_hex: 1]
import Logger, only: [debug: 1, warn: 1]
alias CoAP.Message
  # Supervisor child spec: the child is identified by its connection_id (the
  # second element of the init args) and is :transient, so it is restarted
  # only after an abnormal exit — a normally completed exchange stays down.
  def child_spec([_, connection_id, _] = args) do
    %{
      id: connection_id,
      start: {__MODULE__, :start_link, [args]},
      restart: :transient,
      modules: [__MODULE__]
    }
  end
  # Unlinked and linked constructors; both forward the same init args to init/1.
  def start(args), do: GenServer.start(__MODULE__, args)
  def start_link(args), do: GenServer.start_link(__MODULE__, args)
  # init with port 5163/config (server), or 0 (client)
  # endpoint => server
  @doc """
  `init` functions for Server (e.g., phoenix endpoint) and Client

  When only `endpoint` and `port` (and optionally `config`) are provided, init
  for Server: open a udp socket on the given port and store it in state;
  initialize empty `connections` and `monitors` maps in state.

  When `endpoint`, `{host, port, token}`, and `connection` are provided, init
  for Client: opens a socket for sending (and receiving responses on a random
  ephemeral port — it does not listen on any known port for new messages).
  Started by a `CoAP.Connection` to deliver a client request message.
  """
  # Server without explicit config: delegate with an empty config list.
  def init([endpoint, port]), do: init([endpoint, port, []])
  def init([endpoint, {host, port, token}, connection]) do
    # Port 0 asks the OS for an ephemeral port; `port: 0` in state is also how
    # type/1 later distinguishes client sockets from server sockets.
    {:ok, socket} = :gen_udp.open(0, [:binary, {:active, true}, {:reuseaddr, true}])
    ip = normalize_host(host)
    connection_id = {ip, port, token}
    # Monitor the pre-existing connection so we can tear down when it finishes.
    ref = Process.monitor(connection)
    debug("Existing conn: #{inspect(connection)} w/ ref: #{inspect(ref)}")
    {:ok,
     %{
       port: 0,
       socket: socket,
       endpoint: endpoint,
       connections: %{connection_id => connection},
       monitors: %{ref => connection_id}
     }}
  end
  def init([endpoint, port, config]) do
    # Server mode: listen on the configured port; connections are created
    # lazily as peers send datagrams (see connection_for/3).
    {:ok, socket} = :gen_udp.open(port, [:binary, {:active, true}, {:reuseaddr, true}])
    {:ok,
     %{
       port: port,
       socket: socket,
       endpoint: endpoint,
       connections: %{},
       monitors: %{},
       config: config
     }}
  end
# Receive udp packets, forward to the appropriate connection
def handle_info({:udp, _socket, peer_ip, peer_port, data}, state) do
debug("CoAP socket received raw data #{to_hex(data)} from #{inspect({peer_ip, peer_port})}")
message = Message.decode(data)
{connection, new_state} =
connection_for(message.request, {peer_ip, peer_port, message.token}, state)
case connection do
nil ->
warn(
"CoAP socket received message for lost connection from " <>
"ip: #{inspect(peer_ip)}, port: #{inspect(peer_port)}. Message: #{inspect(message)}"
)
c ->
send(c, {:receive, message})
end
{:noreply, new_state}
end
# Deliver messages to be sent to a peer
def handle_info({:deliver, message, {host, port} = _peer, tag}, %{socket: socket} = state) do
data = Message.encode(message)
ip = normalize_host(host)
debug("CoAP socket sending raw data #{to_hex(data)} to #{inspect({ip, port})}")
:telemetry.execute(
[:coap_ex, :connection, :data_sent],
%{size: byte_size(data)},
%{host: ip, port: port, message_id: message.message_id, token: message.token, tag: tag}
)
:gen_udp.send(socket, ip, port, data)
{:noreply, state}
end
  # Handles message for completed connection
  # Removes complete connection from the registry and monitoring
  def handle_info({:DOWN, ref, :process, _from, reason}, state) do
    # NOTE(review): assumes every :DOWN we receive carries a ref present in
    # state.monitors — Map.get/2 returns nil for an unknown ref and the
    # destructuring below would then raise. TODO confirm that is intended.
    {host, port, _} = Map.get(state.monitors, ref)
    :telemetry.execute(
      [:coap_ex, :connection, :connection_ended],
      %{},
      %{type: type(state), host: host, port: port}
    )
    connection_complete(type(state), ref, reason, state)
  end
  # Server mode: a monitored connection process exited. Drop it from both the
  # connections registry and the monitors map, and keep the listener running.
  defp connection_complete(:server, ref, reason, %{monitors: monitors} = state) do
    connection_id = Map.get(monitors, ref)
    connection = Map.get(state[:connections], connection_id)
    debug(
      "CoAP socket SERVER received DOWN:#{reason} in CoAP.SocketServer from:#{
        inspect(connection_id)
      }:#{inspect(connection)}:#{inspect(ref)}"
    )
    {:noreply,
     %{
       state
       | connections: Map.delete(state.connections, connection_id),
         monitors: Map.delete(monitors, ref)
     }}
  end
  # Client mode: the single connection this socket was opened for has exited,
  # so the socket server itself stops normally (:transient restart means no
  # restart on a :normal stop).
  defp connection_complete(:client, ref, reason, %{monitors: monitors} = state) do
    connection_id = Map.get(monitors, ref)
    connection = Map.get(state[:connections], connection_id)
    debug(
      "CoAP socket CLIENT received DOWN:#{reason} in CoAP.SocketServer from: #{
        inspect(connection_id)
      }:#{inspect(connection)}:#{inspect(ref)}"
    )
    {:stop, :normal, state}
  end
  # Looks up the connection registered under connection_id ({ip, port, token}).
  # Returns {connection_pid_or_nil, possibly_updated_state}.
  # NOTE(review): the first argument (the decoded request) is currently unused.
  defp connection_for(_request, connection_id, state) do
    connection = Map.get(state.connections, connection_id)
    case {connection, type(state)} do
      {nil, :server} ->
        # Unknown peer on a listening socket: start a fresh CoAP.Connection,
        # monitor it, register it under connection_id, and emit telemetry.
        {:ok, conn} = start_connection(self(), state.endpoint, connection_id, state.config)
        ref = Process.monitor(conn)
        debug("Started conn: #{inspect(conn)} for #{inspect(connection_id)}")
        {host, port, token} = connection_id
        :telemetry.execute(
          [:coap_ex, :connection, :connection_started],
          %{},
          %{host: host, port: port, token: token}
        )
        {
          conn,
          %{
            state
            | connections: Map.put(state.connections, connection_id, conn),
              monitors: Map.put(state.monitors, ref, connection_id)
          }
        }
      {nil, :client} ->
        # if this socket server was started to listen for a response as a client,
        # the associated connection that was started by the client has died
        {nil, state}
      _ ->
        # Known connection: hand it back unchanged.
        {connection, state}
    end
  end
defp start_connection(server, endpoint, peer, config) do
DynamicSupervisor.start_child(
CoAP.ConnectionSupervisor,
{
CoAP.Connection,
[server, endpoint, peer, config]
}
)
end
defp normalize_host(host) when is_tuple(host), do: host
defp normalize_host(host) when is_binary(host) do
host
|> to_charlist()
|> normalize_host()
end
defp normalize_host(host) when is_list(host) do
host
|> :inet.getaddr(:inet)
|> case do
{:ok, ip} -> ip
{:error, _reason} -> nil
end
end
  # A socket opened on port 0 exists only for an outgoing client exchange;
  # any other port means we are listening as a server (see init/1).
  defp type(%{port: 0}), do: :client
  defp type(_), do: :server
end
| 30.106996 | 97 | 0.637233 |
ff12eb6ec42c4eb03c8cbdb4b16a56782eaaecc7 | 780 | ex | Elixir | lib/real_world/accounts/user.ex | codefordenver/new-code-for-denver-site | af6cc232c898a197601b4d37e0cd217d0503fd9b | [
"MIT"
] | 1 | 2019-09-28T05:50:46.000Z | 2019-09-28T05:50:46.000Z | lib/real_world/accounts/user.ex | codefordenver/new-code-for-denver-site | af6cc232c898a197601b4d37e0cd217d0503fd9b | [
"MIT"
] | null | null | null | lib/real_world/accounts/user.ex | codefordenver/new-code-for-denver-site | af6cc232c898a197601b4d37e0cd217d0503fd9b | [
"MIT"
] | null | null | null | defmodule RealWorld.Accounts.User do
@moduledoc """
The User model.
"""
use Ecto.Schema
import Ecto.Changeset
  # Fields callers must supply vs. fields that may be omitted (see changeset/2).
  @required_fields ~w(email username password)a
  @optional_fields ~w(bio image)a
  # Ecto schema backing the "users" table, with timestamps stored in
  # :created_at instead of the default :inserted_at.
  # NOTE(review): `unique: true` is not a standard Ecto `field` option —
  # uniqueness appears to be enforced via the unique_constraints in
  # changeset/2; confirm these options are intentional.
  schema "users" do
    field :email, :string, unique: true
    field :password, :string
    field :username, :string, unique: true
    field :bio, :string
    field :image, :string
    has_many :articles, RealWorld.Blog.Article
    has_many :comments, RealWorld.Blog.Comment
    timestamps inserted_at: :created_at
  end
def changeset(user, attrs) do
user
|> cast(attrs, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
|> unique_constraint(:username, name: :users_username_index)
|> unique_constraint(:email)
end
end
| 22.941176 | 64 | 0.7 |
ff1329b60084267b774b9c489f83e402ea5400aa | 22,060 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/region_backend_services.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/region_backend_services.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/region_backend_services.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Api.RegionBackendServices do
@moduledoc """
API calls for all endpoints tagged `RegionBackendServices`.
"""
alias GoogleApi.Compute.V1.Connection
import GoogleApi.Compute.V1.RequestBuilder
@doc """
Deletes the specified regional BackendService resource.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- backend_service (String): Name of the BackendService resource to delete.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :request_id (String): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
## Returns
{:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_delete(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_delete(connection, project, region, backend_service, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"requestId" => :query
}
%{}
|> method(:delete)
|> url("/#{project}/regions/#{region}/backendServices/#{backend_service}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.Operation{})
end
@doc """
Returns the specified regional BackendService resource.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- backend_service (String): Name of the BackendService resource to return.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
## Returns
{:ok, %GoogleApi.Compute.V1.Model.BackendService{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_get(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.BackendService.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_get(connection, project, region, backend_service, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query
}
%{}
|> method(:get)
|> url("/#{project}/regions/#{region}/backendServices/#{backend_service}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.BackendService{})
end
@doc """
Gets the most recent health check results for this regional BackendService.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String):
- region (String): Name of the region scoping this request.
- backend_service (String): Name of the BackendService resource to which the queried instance belongs.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :body (ResourceGroupReference):
## Returns
{:ok, %GoogleApi.Compute.V1.Model.BackendServiceGroupHealth{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_get_health(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.BackendServiceGroupHealth.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_get_health(connection, project, region, backend_service, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"body" => :body
}
%{}
|> method(:post)
|> url("/#{project}/regions/#{region}/backendServices/#{backend_service}/getHealth")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.BackendServiceGroupHealth{})
end
@doc """
Creates a regional BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a regional backend service. Read Restrictions and Guidelines for more information.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :request_id (String): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
- :body (BackendService):
## Returns
{:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_insert(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_insert(connection, project, region, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"requestId" => :query,
:"body" => :body
}
%{}
|> method(:post)
|> url("/#{project}/regions/#{region}/backendServices")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.Operation{})
end
@doc """
Retrieves the list of regional BackendService resources available to the specified project in the given region.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :filter (String): Sets a filter {expression} for filtering listed resources. Your {expression} must be in the format: field_name comparison_string literal_string. The field_name is the name of the field you want to compare. Only atomic field types are supported (string, number, boolean). The comparison_string must be either eq (equals) or ne (not equals). The literal_string is the string value to filter to. The literal value must be valid for the type of field you are filtering by (string, number, boolean). For string fields, the literal value is interpreted as a regular expression using RE2 syntax. The literal value must match the entire field. For example, to filter for instances that do not have a name of example-instance, you would use name ne example-instance. You can filter on nested fields. For example, you could filter on instances that have set the scheduling.automaticRestart field to true. Use filtering on nested fields to take advantage of labels to organize and search for results based on label values. To filter on multiple expressions, provide each separate expression within parentheses. For example, (scheduling.automaticRestart eq true) (zone eq us-central1-f). Multiple expressions are treated as AND expressions, meaning that resources must match all expressions to pass the filters.
- :max_results (Integer): The maximum number of results per page that should be returned. If the number of available results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to get the next page of results in subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default: 500)
- :order_by (String): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using orderBy=\"creationTimestamp desc\". This sorts results based on the creationTimestamp field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by name or creationTimestamp desc is supported.
- :page_token (String): Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list request to get the next page of results.
## Returns
{:ok, %GoogleApi.Compute.V1.Model.BackendServiceList{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_list(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.BackendServiceList.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_list(connection, project, region, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"filter" => :query,
:"maxResults" => :query,
:"orderBy" => :query,
:"pageToken" => :query
}
%{}
|> method(:get)
|> url("/#{project}/regions/#{region}/backendServices")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.BackendServiceList{})
end
@doc """
Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- backend_service (String): Name of the BackendService resource to patch.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :request_id (String): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
- :body (BackendService):
## Returns
{:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_patch(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_patch(connection, project, region, backend_service, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"requestId" => :query,
:"body" => :body
}
%{}
|> method(:patch)
|> url("/#{project}/regions/#{region}/backendServices/#{backend_service}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.Operation{})
end
@doc """
Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.
## Parameters
- connection (GoogleApi.Compute.V1.Connection): Connection to server
- project (String): Project ID for this request.
- region (String): Name of the region scoping this request.
- backend_service (String): Name of the BackendService resource to update.
- opts (KeywordList): [optional] Optional parameters
- :alt (String): Data format for the response.
- :fields (String): Selector specifying which fields to include in a partial response.
- :key (String): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String): OAuth 2.0 token for the current user.
- :pretty_print (Boolean): Returns response with indentations and line breaks.
- :quota_user (String): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
- :user_ip (String): IP address of the site where the request originates. Use this if you want to enforce per-user limits.
- :request_id (String): An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
- :body (BackendService):
## Returns
{:ok, %GoogleApi.Compute.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec compute_region_backend_services_update(Tesla.Env.client, String.t, String.t, String.t, keyword()) :: {:ok, GoogleApi.Compute.V1.Model.Operation.t} | {:error, Tesla.Env.t}
def compute_region_backend_services_update(connection, project, region, backend_service, opts \\ []) do
optional_params = %{
:"alt" => :query,
:"fields" => :query,
:"key" => :query,
:"oauth_token" => :query,
:"prettyPrint" => :query,
:"quotaUser" => :query,
:"userIp" => :query,
:"requestId" => :query,
:"body" => :body
}
%{}
|> method(:put)
|> url("/#{project}/regions/#{region}/backendServices/#{backend_service}")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(%GoogleApi.Compute.V1.Model.Operation{})
end
end
| 62.670455 | 1,327 | 0.720172 |
ff132eb62c02fbbb83972a05759f61f953acbc78 | 1,789 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/list_operations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dataproc/lib/google_api/dataproc/v1/model/list_operations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dataproc/lib/google_api/dataproc/v1/model/list_operations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataproc.V1.Model.ListOperationsResponse do
  @moduledoc """
  The response message for Operations.ListOperations.

  ## Attributes

  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The standard List next-page token.
  *   `operations` (*type:* `list(GoogleApi.Dataproc.V1.Model.Operation.t)`, *default:* `nil`) - A list of operations that matches the specified filter in the request.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :nextPageToken => String.t() | nil,
          :operations => list(GoogleApi.Dataproc.V1.Model.Operation.t()) | nil
        }

  # `field/1,3` is provided by GoogleApi.Gax.ModelBase and registers each
  # attribute's (de)serialization mapping; `:operations` deserializes each
  # list element into an Operation model.
  field(:nextPageToken)
  field(:operations, as: GoogleApi.Dataproc.V1.Model.Operation, type: :list)
end
# Delegates Poison decoding to the model module's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.ListOperationsResponse do
  def decode(value, options) do
    GoogleApi.Dataproc.V1.Model.ListOperationsResponse.decode(value, options)
  end
end
# Delegates Poison encoding to the shared GoogleApi.Gax.ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.ListOperationsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.78 | 167 | 0.737283 |
ff133a1ad032c49231b7375103db5d0db7fdc321 | 8,680 | exs | Elixir | test/pow/ecto/context_test.exs | crooke/pow | 34ef1282260f2765b0b3df14485771b5e9c2094b | [
"MIT"
] | null | null | null | test/pow/ecto/context_test.exs | crooke/pow | 34ef1282260f2765b0b3df14485771b5e9c2094b | [
"MIT"
] | null | null | null | test/pow/ecto/context_test.exs | crooke/pow | 34ef1282260f2765b0b3df14485771b5e9c2094b | [
"MIT"
] | null | null | null | defmodule Pow.Ecto.ContextTest do
use Pow.Test.Ecto.TestCase
doctest Pow.Ecto.Context
defmodule TimingAttackUser do
@moduledoc false
use Ecto.Schema
use Pow.Ecto.Schema, password_hash_methods: {&__MODULE__.send_hash_password/1, &__MODULE__.send_verify_password/2}
alias Pow.Ecto.Schema.Password
schema "users" do
pow_user_fields()
timestamps()
end
def send_hash_password(password) do
send(self(), {:password_hash, password})
Password.pbkdf2_hash(password)
end
def send_verify_password(password, password_hash) do
send(self(), {:password_verify, password, password_hash})
Password.pbkdf2_verify(password, password_hash)
end
end
alias Ecto.Changeset
alias Pow.Ecto.{Context, Schema.Password}
alias Pow.Test.Ecto.{Repo, Users, Users.User, Users.UsernameUser}
@config [repo: Repo, user: User]
@username_config [repo: Repo, user: UsernameUser]
defmodule CustomUsers do
def get_by([email: :test]), do: %User{email: :ok, password_hash: Password.pbkdf2_hash("secret1234")}
end
describe "authenticate/2" do
@password "secret1234"
@valid_params %{"email" => "test@example.com", "password" => @password}
@valid_params_username %{"username" => "john.doe", "password" => @password}
setup do
password_hash = Password.pbkdf2_hash(@password)
user =
%User{}
|> Changeset.change(email: "test@example.com", password_hash: password_hash)
|> Repo.insert!()
username_user =
%UsernameUser{}
|> Changeset.change(username: "john.doe", password_hash: password_hash)
|> Repo.insert!()
{:ok, %{user: user, username_user: username_user}}
end
test "requires user schema mod in config" do
assert_raise Pow.Config.ConfigError, "No `:user` configuration option found.", fn ->
Context.authenticate(@valid_params, Keyword.delete(@config, :user))
end
end
test "requires repo in config" do
assert_raise Pow.Config.ConfigError, "No `:repo` configuration option found.", fn ->
Context.authenticate(@valid_params, Keyword.delete(@config, :repo))
end
end
test "authenticates", %{user: user, username_user: username_user} do
refute Context.authenticate(Map.put(@valid_params, "email", "other@example.com"), @config)
refute Context.authenticate(Map.put(@valid_params, "password", "invalid"), @config)
assert Context.authenticate(@valid_params, @config) == user
refute Context.authenticate(Map.put(@valid_params_username, "username", "jane.doe"), @username_config)
refute Context.authenticate(Map.put(@valid_params_username, "password", "invalid"), @username_config)
assert Context.authenticate(@valid_params_username, @username_config) == username_user
end
test "authenticates with case insensitive value for user id field", %{user: user, username_user: username_user} do
assert Context.authenticate(%{"email" => "TEST@example.COM", "password" => @password}, @config) == user
assert Context.authenticate(%{"username" => "JOHN.doE", "password" => @password}, @username_config) == username_user
end
test "handles nil values" do
refute Context.authenticate(%{"password" => @password}, @config)
refute Context.authenticate(%{"email" => nil, "password" => @password}, @config)
refute Context.authenticate(%{"email" => "test@example.com"}, @config)
refute Context.authenticate(%{"email" => "test@example.com", "password" => nil}, @config)
end
test "authenticates with extra trailing and leading whitespace for user id field", %{user: user, username_user: username_user} do
assert Context.authenticate(%{"email" => " test@example.com ", "password" => @password}, @config) == user
assert Context.authenticate(%{"username" => " john.doe ", "password" => @password}, @username_config) == username_user
end
test "as `use Pow.Ecto.Context`", %{user: user} do
assert Users.authenticate(@valid_params) == user
assert Users.authenticate(:test_macro) == :ok
end
test "with `:users_context`" do
params = Map.put(@valid_params, "email", :test)
assert %User{email: :ok} = Context.authenticate(params, @config ++ [users_context: CustomUsers])
end
test "prevents timing attack" do
config = [repo: Repo, user: TimingAttackUser]
refute Context.authenticate(Map.put(@valid_params, "email", "other@example.com"), config)
assert_received {:password_hash, ""}
refute Context.authenticate(Map.put(@valid_params, "password", "invalid"), config)
assert_received {:password_verify, "invalid", _any}
assert Context.authenticate(@valid_params, config)
assert_received {:password_verify, "secret1234", _any}
end
end
describe "create/2" do
@valid_params %{
"email" => "test@example.com",
"custom" => "custom",
"password" => @password,
"confirm_password" => @password
}
test "creates" do
assert {:error, _changeset} = Context.create(Map.delete(@valid_params, "confirm_password"), @config)
assert {:ok, user} = Context.create(@valid_params, @config)
assert user.custom == "custom"
refute user.password
refute user.confirm_password
end
test "as `use Pow.Ecto.Context`" do
assert {:ok, user} = Users.create(@valid_params)
assert user.custom == "custom"
refute user.password
assert Users.create(:test_macro) == :ok
end
end
describe "update/2" do
@valid_params %{
"email" => "new@example.com",
"custom" => "custom",
"password" => "new_#{@password}",
"confirm_password" => "new_#{@password}",
"current_password" => @password
}
setup do
password_hash = Password.pbkdf2_hash(@password)
changeset = Changeset.change(%User{}, email: "test@exampe.com", password_hash: password_hash)
{:ok, %{user: Repo.insert!(changeset)}}
end
test "updates", %{user: user} do
assert {:error, _changeset} = Context.update(user, Map.delete(@valid_params, "current_password"), @config)
assert {:ok, updated_user} = Context.update(user, @valid_params, @config)
assert Context.authenticate(@valid_params, @config) == updated_user
assert updated_user.custom == "custom"
refute updated_user.password
refute updated_user.confirm_password
refute updated_user.current_password
end
test "as `use Pow.Ecto.Context`", %{user: user} do
assert {:ok, user} = Users.update(user, @valid_params)
assert user.custom == "custom"
refute user.password
assert Users.update(user, :test_macro) == :ok
end
end
describe "delete/2" do
setup do
changeset = Changeset.change(%User{}, email: "test@example.com")
{:ok, %{user: Repo.insert!(changeset)}}
end
test "deletes", %{user: user} do
assert {:ok, user} = Context.delete(user, @config)
assert user.__meta__.state == :deleted
end
test "as `use Pow.Ecto.Context`", %{user: user} do
assert {:ok, user} = Users.delete(user)
assert user.__meta__.state == :deleted
assert Users.delete(:test_macro) == :ok
end
end
describe "get_by/2" do
@email "test@example.com"
@username "john.doe"
setup do
changeset = Changeset.change(%User{}, email: @email)
changeset_username = Changeset.change(%UsernameUser{}, username: @username)
{:ok, %{user: Repo.insert!(changeset), username_user: Repo.insert!(changeset_username)}}
end
test "retrieves", %{user: user, username_user: username_user} do
get_by_user = Context.get_by([email: @email], @config)
assert get_by_user.id == user.id
get_by_user = Context.get_by([username: @username], @username_config)
assert get_by_user.id == username_user.id
end
test "retrieves with case insensitive user id", %{user: user, username_user: username_user} do
get_by_user = Context.get_by([email: "TEST@EXAMPLE.COM"], @config)
assert get_by_user.id == user.id
get_by_user = Context.get_by([username: "JOHN.DOE"], @username_config)
assert get_by_user.id == username_user.id
end
test "handles nil value before normalization of user id field value" do
assert_raise ArgumentError, ~r/Comparison with nil is forbidden as it is unsafe/, fn ->
Context.get_by([email: nil], @config)
end
end
test "as `use Pow.Ecto.Context`", %{user: user} do
get_by_user = Users.get_by(email: @email)
assert get_by_user.id == user.id
assert Users.get_by(:test_macro) == :ok
end
end
end
| 35.867769 | 133 | 0.665438 |
ff135d8981b1c13c4a4f988fe339a8e8aa36e274 | 1,556 | ex | Elixir | larc_website/test/support/data_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | null | null | null | larc_website/test/support/data_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | null | null | null | larc_website/test/support/data_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | 1 | 2020-05-22T19:21:24.000Z | 2020-05-22T19:21:24.000Z | defmodule LarcWebsite.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use LarcWebsite.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias LarcWebsite.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import LarcWebsite.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(LarcWebsite.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(LarcWebsite.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.785714 | 77 | 0.693445 |
ff136437d273da822028ad6b3fb5ccaf10fd61ea | 1,390 | ex | Elixir | apps/bytepack/lib/bytepack/packages/package.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack/lib/bytepack/packages/package.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack/lib/bytepack/packages/package.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule Bytepack.Packages.Package do
use Bytepack.Schema
import Ecto.Changeset
@types ~w(hex npm)
schema "packages" do
field :name, :string
field :type, :string
field :description, :string
field :external_doc_url, :string
field :org_id, :id
has_many :releases, Bytepack.Packages.Release
timestamps()
end
@doc false
def changeset(package, attrs) do
package
|> cast(attrs, [:name, :type, :description])
|> validate_required([:name, :type, :description])
|> validate_inclusion(:type, @types)
|> validate_length(:name, min: 2)
|> validate_format(:name, ~r"^[a-z]\w*$")
|> unique_constraint([:name, :type])
end
def update_changeset(package, attrs) do
package
|> cast(attrs, [:description, :external_doc_url])
|> validate_required([:description])
|> Bytepack.Extensions.Ecto.Validations.normalize_and_validate_url(:external_doc_url)
end
@doc """
Update the given `package_ids` with corresponding ones from `deps_map`.
"""
def add_deps(package_ids, deps_map) do
MapSet.new()
|> add_deps(package_ids, deps_map)
|> MapSet.to_list()
end
defp add_deps(acc, [id | rest], deps_map) do
acc
|> MapSet.put(id)
|> add_deps(rest, deps_map)
|> add_deps(Enum.to_list(Map.get(deps_map, id, [])), deps_map)
end
defp add_deps(acc, [], _deps_map) do
acc
end
end
| 24.821429 | 89 | 0.66259 |
ff1365b9a5e4ed199bbc478affadab3c241fd957 | 1,528 | exs | Elixir | mix.exs | brownt23/imgproxy | 23d541286ac0b3e80a38915a72dcafc1ef1f71ca | [
"MIT"
] | 10 | 2019-12-08T22:06:10.000Z | 2022-03-09T14:40:07.000Z | mix.exs | brownt23/imgproxy | 23d541286ac0b3e80a38915a72dcafc1ef1f71ca | [
"MIT"
] | 3 | 2021-09-25T14:20:57.000Z | 2022-01-17T15:11:34.000Z | mix.exs | brownt23/imgproxy | 23d541286ac0b3e80a38915a72dcafc1ef1f71ca | [
"MIT"
] | 5 | 2020-05-23T17:15:28.000Z | 2022-01-17T09:10:54.000Z | defmodule ImgProxy.MixProject do
use Mix.Project
@source_url "https://github.com/bmuller/imgproxy"
@version "3.0.0"
def project do
[
app: :imgproxy,
aliases: aliases(),
version: @version,
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
description: "imgproxy URL generator and helper functions",
deps: deps(),
package: package(),
source_url: @source_url,
docs: docs(),
preferred_cli_env: [test: :test, "ci.test": :test]
]
end
defp docs do
[
extras: [
"CHANGELOG.md": [],
"LICENSE.md": [title: "License"],
"README.md": [title: "Overview"]
],
extra_section: "GUIDES",
main: "readme",
source_url: @source_url,
source_ref: "v#{@version}",
formatters: ["html"]
]
end
defp aliases do
[
"ci.test": [
"format --check-formatted",
"test",
"credo"
]
]
end
def package do
[
files: ["lib", "mix.exs", "README*", "LICENSE*"],
maintainers: ["Brian Muller"],
licenses: ["MIT"],
links: %{
"Changelog" => "https://hexdocs.pm/imgproxy/changelog.html",
"GitHub" => @source_url,
"imgproxy Site" => "https://imgproxy.net"
}
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:ex_doc, "~> 0.25", only: :dev, runtime: false}
]
end
end
| 20.648649 | 68 | 0.524215 |
ff13705b7384775c064dd764415bbb00ddb04b9a | 5,411 | ex | Elixir | lib/logger/lib/logger/error_handler.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/logger/lib/logger/error_handler.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | lib/logger/lib/logger/error_handler.ex | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | defmodule Logger.ErrorHandler do
@moduledoc false
@behaviour :gen_event
require Logger
def init({otp?, sasl?, threshold}) do
# We store the Logger PID in the state because when we are shutting
# down the Logger application, the Logger process may be terminated
# and then trying to reach it will lead to crashes. So we send a
# message to a PID, instead of named process, to avoid crashes on
# send since this handler will be removed soon by the supervisor.
state = %{
otp: otp?,
sasl: sasl?,
threshold: threshold,
logger: Process.whereis(Logger),
last_length: 0,
last_time: :os.timestamp(),
dropped: 0
}
{:ok, state}
end
## Handle event
def handle_event({_type, gl, _msg}, state) when node(gl) != node() do
{:ok, state}
end
def handle_event(event, state) do
state = check_threshold(state)
log_event(event, state)
{:ok, state}
end
def handle_call(request, _state) do
exit({:bad_call, request})
end
def handle_info(_msg, state) do
{:ok, state}
end
def code_change(_old_vsn, state, _extra) do
{:ok, state}
end
def terminate(_reason, _state) do
:ok
end
## Helpers
defp log_event({:error, gl, {pid, format, data}}, %{otp: true} = state),
do: log_event(:error, :format, gl, pid, {format, data}, state)
defp log_event({:error_report, gl, {pid, :std_error, format}}, %{otp: true} = state),
do: log_event(:error, :report, gl, pid, {:std_error, format}, state)
defp log_event({:error_report, gl, {pid, :supervisor_report, data}}, %{sasl: true} = state),
do: log_event(:error, :report, gl, pid, {:supervisor_report, data}, state)
defp log_event({:error_report, gl, {pid, :crash_report, data}}, %{sasl: true} = state),
do: log_event(:error, :report, gl, pid, {:crash_report, data}, state)
defp log_event({:warning_msg, gl, {pid, format, data}}, %{otp: true} = state),
do: log_event(:warn, :format, gl, pid, {format, data}, state)
defp log_event({:warning_report, gl, {pid, :std_warning, format}}, %{otp: true} = state),
do: log_event(:warn, :report, gl, pid, {:std_warning, format}, state)
defp log_event({:info_msg, gl, {pid, format, data}}, %{otp: true} = state),
do: log_event(:info, :format, gl, pid, {format, data}, state)
defp log_event({:info_report, gl, {pid, :std_info, format}}, %{otp: true} = state),
do: log_event(:info, :report, gl, pid, {:std_info, format}, state)
defp log_event({:info_report, gl, {pid, :progress, data}}, %{sasl: true} = state),
do: log_event(:info, :report, gl, pid, {:progress, data}, state)
defp log_event(_, _state), do: :ok
defp log_event(level, kind, gl, pid, {type, _} = data, state) do
%{level: min_level, truncate: truncate, utc_log: utc_log?, translators: translators} =
Logger.Config.__data__()
with log when log != :lt <- Logger.compare_levels(level, min_level),
{:ok, message} <- translate(translators, min_level, level, kind, data, truncate) do
message = Logger.Utils.truncate(message, truncate)
# Mode is always async to avoid clogging the error_logger
meta = [pid: ensure_pid(pid), error_logger: ensure_type(type)]
event = {Logger, message, Logger.Utils.timestamp(utc_log?), meta}
:gen_event.notify(state.logger, {level, gl, event})
end
:ok
end
defp ensure_type(type) when is_atom(type), do: type
defp ensure_type(_), do: :format
defp ensure_pid(pid) when is_pid(pid), do: pid
defp ensure_pid(_), do: self()
defp check_threshold(state) do
%{last_time: last_time, last_length: last_length, dropped: dropped, threshold: threshold} =
state
{m, s, _} = current_time = :os.timestamp()
current_length = message_queue_length()
cond do
match?({^m, ^s, _}, last_time) and current_length - last_length > threshold ->
count = drop_messages(current_time, 0)
%{state | dropped: dropped + count, last_length: message_queue_length()}
match?({^m, ^s, _}, last_time) ->
state
true ->
if dropped > 0 do
message =
"Logger dropped #{dropped} OTP/SASL messages as it " <>
"exceeded the amount of #{threshold} messages/second"
Logger.warn(message)
end
%{state | dropped: 0, last_time: current_time, last_length: current_length}
end
end
defp message_queue_length() do
{:message_queue_len, len} = Process.info(self(), :message_queue_len)
len
end
defp drop_messages({m, s, _} = last_time, count) do
case :os.timestamp() do
{^m, ^s, _} ->
receive do
{:notify, _event} -> drop_messages(last_time, count + 1)
after
0 -> count
end
_ ->
count
end
end
defp translate([{mod, fun} | t], min_level, level, kind, data, truncate) do
case apply(mod, fun, [min_level, level, kind, data]) do
{:ok, chardata} -> {:ok, chardata}
:skip -> :skip
:none -> translate(t, min_level, level, kind, data, truncate)
end
end
defp translate([], _min_level, _level, :format, {format, args}, truncate) do
msg =
format
|> Logger.Utils.scan_inspect(args, truncate)
|> :io_lib.build_text()
{:ok, msg}
end
defp translate([], _min_level, _level, :report, {_type, data}, _truncate) do
{:ok, Kernel.inspect(data)}
end
end
| 30.570621 | 95 | 0.630567 |
ff139ab7268cd661cd2e7e1642093d19e1fdf6df | 1,436 | exs | Elixir | apps/omg_watcher_rpc/mix.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/mix.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher_rpc/mix.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | defmodule OMG.WatcherRPC.Mixfile do
use Mix.Project
def project() do
[
app: :omg_watcher_rpc,
version: "#{String.trim(File.read!("../../VERSION"))}",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
deps: deps(),
test_coverage: [tool: ExCoveralls]
]
end
def application() do
[
mod: {OMG.WatcherRPC.Application, []},
extra_applications: [:logger, :runtime_tools, :telemetry]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:prod), do: ["lib"]
defp elixirc_paths(:dev), do: ["lib"]
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp deps() do
[
{:phoenix, "~> 1.3"},
{:plug_cowboy, "~> 1.0"},
{:cors_plug, "~> 2.0"},
{:spandex_phoenix, "~> 0.4.1"},
{:spandex_datadog, "~> 0.4"},
{:telemetry, "~> 0.4.1"},
# UMBRELLA
{:omg_bus, in_umbrella: true},
{:omg_utils, in_umbrella: true},
{:omg_watcher, in_umbrella: true},
# UMBRELLA but test only
{:omg_watcher_info, in_umbrella: true, only: [:test]},
# TEST ONLY
{:ex_machina, "~> 2.3", only: [:test], runtime: false}
]
end
end
| 27.615385 | 63 | 0.559889 |
ff13c622b4f66697243d02d2b8927974c8b7808e | 1,653 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/header.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/header.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/header.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.Header do
  @moduledoc """
  A document header.

  ## Attributes

  *   `content` (*type:* `list(GoogleApi.Docs.V1.Model.StructuralElement.t)`, *default:* `nil`) - The contents of the header.
      The indexes for a header's content begin at zero.
  *   `headerId` (*type:* `String.t`, *default:* `nil`) - The ID of the header.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :content => list(GoogleApi.Docs.V1.Model.StructuralElement.t()),
          :headerId => String.t()
        }

  # `field/1,3` is provided by GoogleApi.Gax.ModelBase and registers each
  # attribute's (de)serialization mapping; `:content` deserializes each
  # list element into a StructuralElement model.
  field(:content, as: GoogleApi.Docs.V1.Model.StructuralElement, type: :list)
  field(:headerId)
end
# Delegates Poison decoding to the model module's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.Header do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.Header.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.Header do
  # Encoding is uniform across generated models, so defer to the shared base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 31.788462 | 125 | 0.715064 |
ff13cc0b0e15495540706800ec19635cf5cb1290 | 493 | ex | Elixir | currency/lib/currency_web/views/error_view.ex | ziminyuri/currency_exchange | 3d65d466a519816f606b16bb2f304db65105ca9a | [
"Apache-2.0"
] | null | null | null | currency/lib/currency_web/views/error_view.ex | ziminyuri/currency_exchange | 3d65d466a519816f606b16bb2f304db65105ca9a | [
"Apache-2.0"
] | null | null | null | currency/lib/currency_web/views/error_view.ex | ziminyuri/currency_exchange | 3d65d466a519816f606b16bb2f304db65105ca9a | [
"Apache-2.0"
defmodule CurrencyWeb.ErrorView do
  use CurrencyWeb, :view

  # To customize the body for a particular status code/format, define an
  # explicit render clause, e.g.:
  #
  #   def render("500.html", _assigns) do
  #     "Internal Server Error"
  #   end

  # Fallback invoked by Phoenix when no matching template exists: derive the
  # response body from the template name, so "404.html" becomes "Not Found".
  def template_not_found(template, _assigns),
    do: Phoenix.Controller.status_message_from_template(template)
end
| 29 | 61 | 0.736308 |
ff140f60338912fcad3d71c27d1489b33b0fba03 | 2,282 | ex | Elixir | lib/stripe/fraud/early_fraud_warnings.ex | pointerish/stripity_stripe | 78fc2334d3be27492318e3de2764820c7285e697 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/fraud/early_fraud_warnings.ex | pointerish/stripity_stripe | 78fc2334d3be27492318e3de2764820c7285e697 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/fraud/early_fraud_warnings.ex | pointerish/stripity_stripe | 78fc2334d3be27492318e3de2764820c7285e697 | [
"BSD-3-Clause"
defmodule Stripe.Fraud.EarlyFraudWarning do
  @moduledoc """
  Work with Stripe Radar Early Fraud Warnings objects.

  You can:
  - Retrieve an early fraud warning
  - Retrieve all early fraud warnings

  Stripe API reference: https://stripe.com/docs/api/radar/early_fraud_warnings
  """

  use Stripe.Entity
  import Stripe.Request

  @typedoc """
  The set of values Stripe reports in the `fraud_type` field.

  One of "card_never_received", or "fraudulent_card_application", or
  "made_with_counterfeit_card", or "made_with_lost_card", or "made_with_stolen_card",
  or "misc", or "unauthorized_use_of_card"
  """
  @type payment_status :: String.t()

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          actionable: boolean(),
          charge: Stripe.id() | Stripe.Charge.t() | nil,
          created: Stripe.timestamp(),
          # Use the payment_status/0 alias (a String.t()) so the enumerated
          # values documented above are discoverable from this field's spec.
          fraud_type: payment_status(),
          livemode: boolean(),
          payment_intent: Stripe.id() | Stripe.PaymentIntent.t() | nil
        }

  defstruct [
    :id,
    :object,
    :actionable,
    :charge,
    :created,
    :fraud_type,
    :livemode,
    :payment_intent
  ]

  @plural_endpoint "radar/early_fraud_warnings"

  @doc ~S"""
  Retrieve an Early Fraud Warning by its Stripe ID (or a struct carrying one).
  """
  @spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def retrieve(id, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:get)
    |> make_request()
  end

  @doc ~S"""
  Retrieve a list of Early Fraud Warnings, optionally filtered by charge or
  payment intent and paginated via `:limit` / `:starting_after` / `:ending_before`.
  """
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:charge) => Stripe.id() | Stripe.Charge.t(),
                 optional(:payment_intent) => Stripe.id() | Stripe.PaymentIntent.t(),
                 optional(:ending_before) => t | Stripe.id(),
                 optional(:limit) => 1..100,
                 optional(:starting_after) => t | Stripe.id()
               }
               | %{}
  def list(params \\ %{}, opts \\ []) do
    new_request(opts)
    |> prefix_expansions()
    |> put_endpoint(@plural_endpoint)
    |> put_method(:get)
    |> put_params(params)
    # Structs/ids passed in these params are collapsed to plain ids.
    |> cast_to_id([:charge, :ending_before, :starting_after])
    |> make_request()
  end
end
| 28.17284 | 94 | 0.593777 |
ff1426e0760136db52b43a3fb6402a17dfb15089 | 112 | exs | Elixir | test/nexmo_test.exs | cbetta/nexmo-elixir | ff72a1dc9a52f5b1a5d7a11f174312fb2a39d7a8 | [
"MIT"
] | 1 | 2017-02-14T13:30:11.000Z | 2017-02-14T13:30:11.000Z | test/nexmo_test.exs | cbetta/nexmo-elixir | ff72a1dc9a52f5b1a5d7a11f174312fb2a39d7a8 | [
"MIT"
] | null | null | null | test/nexmo_test.exs | cbetta/nexmo-elixir | ff72a1dc9a52f5b1a5d7a11f174312fb2a39d7a8 | [
"MIT"
defmodule NexmoTest do
  use ExUnit.Case

  # Run any `iex>` examples embedded in Nexmo's docs as tests.
  doctest Nexmo

  # Sanity check that the test suite itself is wired up.
  test "the truth", do: assert(1 + 1 == 2)
end
| 12.444444 | 22 | 0.660714 |
ff143c396e8e52d9517c97780fa7abe07421d4d3 | 360 | ex | Elixir | lib/web/views/layout_view.ex | smartlogic/smartnote | 2a87df6dfe0a9810f646fcaf9472d831028af6c5 | [
"MIT"
] | 1 | 2021-07-12T14:27:11.000Z | 2021-07-12T14:27:11.000Z | lib/web/views/layout_view.ex | smartlogic/smartnote | 2a87df6dfe0a9810f646fcaf9472d831028af6c5 | [
"MIT"
] | 20 | 2020-07-01T14:54:13.000Z | 2021-08-02T15:35:31.000Z | lib/web/views/layout_view.ex | smartlogic/smartnote | 2a87df6dfe0a9810f646fcaf9472d831028af6c5 | [
"MIT"
defmodule Web.LayoutView do
  use Web, :view

  import Web.Gettext, only: [gettext: 1]

  @doc """
  Returns the search term stored in the connection assigns, or `nil` when
  no search was performed.
  """
  def search_term(conn) do
    conn.assigns[:search_term]
  end

  @doc """
  Header text for the questions listing: a plain title when there is no
  active search, otherwise a title echoing the search term.
  """
  def question_header_text(conn) do
    # Call the local helper directly instead of the fully-qualified
    # `Web.LayoutView.search_term/1` self-call the original used.
    case search_term(conn) do
      nil -> "Questions"
      term -> "Questions matching '#{term}'"
    end
  end
end
| 18 | 50 | 0.683333 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.