hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff5f8e97d5d87da740cf4b4d572bdc1e02546ae7 | 2,076 | ex | Elixir | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/google_apps_cloudidentity_devices_v1_list_devices_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/google_apps_cloudidentity_devices_v1_list_devices_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_identity/lib/google_api/cloud_identity/v1/model/google_apps_cloudidentity_devices_v1_list_devices_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1ListDevicesResponse do
  @moduledoc """
  Response message that is returned from the ListDevices method.

  ## Attributes

  *   `devices` (*type:* `list(GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1Device.t)`, *default:* `nil`) - Devices meeting the list restrictions.
  *   `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results. Empty if there are no more results.
  """

  # The struct, decode/2, and the field/2 macro below are provided by ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :devices =>
            list(GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1Device.t()),
          :nextPageToken => String.t()
        }

  # Each entry of `devices` is decoded as a generated Device model.
  field(:devices,
    as: GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1Device,
    type: :list
  )

  field(:nextPageToken)
end
defimpl Poison.Decoder,
  for: GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1ListDevicesResponse do
  # Decoding is delegated to the model module's own decode/2, which is
  # provided via `use GoogleApi.Gax.ModelBase`.
  def decode(model, opts) do
    GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1ListDevicesResponse.decode(
      model,
      opts
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.CloudIdentity.V1.Model.GoogleAppsCloudidentityDevicesV1ListDevicesResponse do
  # Encoding is shared by all generated models and lives in Gax.ModelBase.
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 34.6 | 166 | 0.751445 |
ff5fab2b0dbd0bc9d0f0d9159c398564e46a6983 | 405 | exs | Elixir | priv/repo/migrations/20210324043952_create_characterizations.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 3 | 2021-03-21T23:52:16.000Z | 2021-06-02T03:47:00.000Z | priv/repo/migrations/20210324043952_create_characterizations.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | 44 | 2021-04-09T04:04:13.000Z | 2022-03-29T06:29:37.000Z | priv/repo/migrations/20210324043952_create_characterizations.exs | conradwt/flix-elixir | e4d6bf6fd79be12fbed6fb6250f78e929247c1a4 | [
"MIT"
] | null | null | null | defmodule Flix.Repo.Migrations.CreateCharacterizations do
  use Ecto.Migration

  # Creates the characterizations join table linking movies to genres.
  def change do
    create table(:characterizations) do
      # Rows are removed automatically when the referenced movie or genre
      # is deleted (on_delete: :delete_all).
      add :movie_id, references(:movies, on_delete: :delete_all)
      add :genre_id, references(:genres, on_delete: :delete_all)

      timestamps()
    end

    # Non-unique indexes to speed up lookups from either side of the join.
    create index(:characterizations, [:movie_id])
    create index(:characterizations, [:genre_id])
  end
end
| 25.3125 | 64 | 0.720988 |
ff602837e8de31be2fec7c7a9c769dba3e7dea54 | 1,295 | ex | Elixir | DL-DLR2-006/DL-DLR2-006.ELEMENT-IoT.ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 13 | 2020-01-18T22:08:44.000Z | 2022-02-06T14:19:57.000Z | DL-DLR2-006/DL-DLR2-006.ELEMENT-IoT.ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 4 | 2019-05-10T07:17:41.000Z | 2021-10-20T16:24:04.000Z | DL-DLR2-006/DL-DLR2-006.ELEMENT-IoT.ex | Realscrat/decentlab-decoders | 3ca5006cd85e3772a15a1b3fff3922c50979eeb6 | [
"MIT"
] | 15 | 2019-06-04T06:13:32.000Z | 2022-02-15T07:28:52.000Z |
# https://www.decentlab.com/products/analog-or-digital-sensor-device-for-lorawan
defmodule Parser do
  use Platform.Parsing.Behaviour

  ## test payloads
  # 0211110003409a00860c54
  # 02111100020c54

  # Field descriptors (name, display label, unit) for the decoded values.
  def fields do
    [
      %{field: "potentiometer_position", display: "Potentiometer position", unit: ""},
      %{field: "battery_voltage", display: "Battery voltage", unit: "V"}
    ]
  end

  # Decodes a Decentlab protocol-version-2 frame:
  # <<0x02, device_id::16, flags::2-bytes, sensor words...>>.
  # Each flag bit enables one sensor section; enabled sections consume their
  # words from the front of `words` in order (sensor0 first, then sensor1).
  def parse(<<2, device_id::size(16), flags::binary-size(2), words::binary>>, _meta) do
    {_remaining, result} =
      {words, %{:device_id => device_id, :protocol_version => 2}}
      |> sensor0(flags)
      |> sensor1(flags)
    result
  end

  # Sensor 0 (flags bit 0 set): potentiometer position built from two 16-bit
  # words (x0 = low word, x1 = high word). The scaling constants come from the
  # device datasheet — presumably generator-emitted; the trailing `* 1` looks
  # like the generated unit factor.
  defp sensor0({<<x0::size(16), x1::size(16), remaining::binary>>, result},
               <<_::size(15), 1::size(1), _::size(0)>>) do
    {remaining,
     Map.merge(result,
       %{
         :potentiometer_position => ((x0 + x1*65536) / 8388608 - 1) * 1
       })}
  end
  # Flag bit not set: pass the {words, result} accumulator through untouched.
  defp sensor0(result, _flags), do: result

  # Sensor 1 (flags bit 1 set): battery voltage; the raw 16-bit value is
  # divided by 1000 to yield volts (field unit is "V"), i.e. transmitted in mV.
  defp sensor1({<<x0::size(16), remaining::binary>>, result},
               <<_::size(14), 1::size(1), _::size(1)>>) do
    {remaining,
     Map.merge(result,
       %{
         :battery_voltage => x0 / 1000
       })}
  end
  defp sensor1(result, _flags), do: result
end | 26.979167 | 87 | 0.564479 |
ff602fe4440d8aa76f47ae215238a8a0c03dd9c2 | 8,697 | ex | Elixir | lib/epi_contacts/commcare_sms_trigger.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | lib/epi_contacts/commcare_sms_trigger.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | 13 | 2021-06-29T04:35:41.000Z | 2022-02-09T04:25:39.000Z | lib/epi_contacts/commcare_sms_trigger.ex | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | [
"Apache-2.0"
] | null | null | null | defmodule EpiContacts.CommcareSmsTrigger do
  @moduledoc """
  Oban worker that encapsulates the logic for whether a CommCare patient case
  should be updated with a secure_id (the update that triggers the SMS).
  """
  use Oban.Worker, queue: :default
  require Logger
  alias EpiContacts.{Commcare, Encryption, PatientCase}

  defmodule PatientCaseWrapper do
    @moduledoc """
    A struct that wraps the data, mainly so we can roll out county-by-county via FunWithFlags.
    """
    defstruct [:patient_case]

    # Wraps a raw patient-case map so it can serve as a FunWithFlags actor.
    def new(params) do
      %__MODULE__{
        patient_case: params
      }
    end
  end

  # Actor identity for feature-flag checks: keyed by the case's CommCare
  # domain, so a flag can be enabled for an entire domain (county) at once.
  defimpl FunWithFlags.Actor, for: PatientCaseWrapper do
    def id(%{patient_case: patient_case}) do
      domain = PatientCase.domain(patient_case)
      "domain:#{domain}"
    end
  end

  # Feature flags gating each trigger path.
  @sms_trigger_feature_flag :commcare_secure_id
  @pre_ci_feature_flag :pre_ci
  @pre_ci_surge_feature_flag :pre_ci_surge
  # Maximum days between a case being opened and last modified for the
  # pre-CI path (see acceptable_case_age?/1).
  @maximum_pre_ci_case_age 10
  # years old; don't send to minors
  @minimum_age 18
  @minimum_age_feature_flag :minimum_age

  # Accessors exposing the flag names outside this module; none of these
  # four functions is referenced within this file.
  def sms_trigger_feature_flag, do: @sms_trigger_feature_flag
  def pre_ci_feature_flag, do: @pre_ci_feature_flag
  def pre_ci_surge_feature_flag, do: @pre_ci_surge_feature_flag
  def minimum_age_feature_flag, do: @minimum_age_feature_flag

  @doc """
  Accepts a patient case, encrypts it, and inserts the job.
  """
  @spec enqueue!(map) :: Oban.Job.t()
  def enqueue!(patient_case) do
    patient_case
    |> prepare_args()
    |> new()
    |> Oban.insert!()
  end

  @spec prepare_args(map) :: map()
  @doc """
  Sets up the encrypted patient case and nonce arguments so that
  any perform/1 can decrypt the patient case.
  Returns %{
    "case_id" => String.t(),
    "domain" => String.t(),
    "encrypted_patient_case" => String.t(),
    "nonce" => String.t()
  }
  """
  def prepare_args(%{"case_id" => case_id, "domain" => domain, "properties" => _properties} = patient_case) do
    {encrypted_patient_case, nonce} = Encryption.encrypt(patient_case)
    %{
      "case_id" => case_id,
      "domain" => domain,
      "encrypted_patient_case" => encrypted_patient_case,
      "nonce" => nonce
    }
  end

  # :pre_ci is logged as :pre_ci_surge while the surge flag is enabled;
  # every other reason is logged unchanged.
  defp determine_trigger_reason_for_logging(:pre_ci) do
    if FunWithFlags.enabled?(@pre_ci_surge_feature_flag),
      do: :pre_ci_surge,
      else: :pre_ci
  end
  defp determine_trigger_reason_for_logging(reason),
    do: reason

  # Happy path: decrypt -> preconditions -> condition checks -> update the
  # CommCare case -> report to analytics. Non-matching steps fall to `else`:
  #   {:error, reason}      - decryption failed (retryable error)
  #   {false, nil}          - preconditions not met; job discarded
  #   false                 - per-case condition checks failed; discarded
  #   {{:error, reason}, _} - the CommCare property update returned an error
  @impl Oban.Worker
  def perform(%_{args: %{"encrypted_patient_case" => encrypted_patient_case, "nonce" => nonce}}) do
    with {:ok, patient_case} <- Encryption.decrypt(encrypted_patient_case, nonce),
         transaction_id = Ecto.UUID.generate(),
         log_transaction(patient_case, transaction_id, "sms_trigger_starting"),
         {true, trigger_decision} <- case_meets_preconditions?(patient_case, transaction_id),
         loggable_trigger_reason <- determine_trigger_reason_for_logging(trigger_decision),
         true <- case_meets_conditions?(patient_case, transaction_id, trigger_decision),
         {:ok, _trigger_reason} <- trigger_sms(loggable_trigger_reason, patient_case, transaction_id) do
      analytics_reporter().report_sms_triggered(
        patient_case: patient_case,
        reason: loggable_trigger_reason,
        timestamp: DateTime.utc_now()
      )
      :ok
    else
      {:error, reason} -> {:error, reason}
      {false, nil} -> {:discard, :not_triggered}
      {{:error, reason}, _trigger_reason} -> {:error, reason}
      false -> {:discard, "conditions not met"}
    end
  end

  # Defensive clause: no trigger reason means there is nothing to send.
  def trigger_sms(nil, _patient_case, _transaction_id), do: {false, nil}

  # Writes the trigger properties back onto the CommCare case and returns
  # {update_result, trigger_reason}.
  def trigger_sms(trigger_reason, patient_case, transaction_id) do
    {
      domain,
      case_id,
      patient_case_properties
    } = Commcare.PatientCase.properties_for_update(patient_case, transaction_id, trigger_reason)
    sms_trigger_result =
      commcare_client().update_properties!(
        domain,
        case_id,
        patient_case_properties
      )
    {sms_trigger_result, trigger_reason}
  end

  # Evaluates the flag-driven trigger paths and logs the decision. Returns
  # {preconditions_met?, trigger_reason} where trigger_reason is
  # :post_ci | :pre_ci_minor | :pre_ci | nil.
  def case_meets_preconditions?(patient_case, transaction_id) do
    post_ci_triggered = post_ci_triggered?(patient_case)
    pre_ci_triggered = pre_ci_triggered?(patient_case)
    pre_ci_minor_triggered = pre_ci_minor_triggered?(patient_case)
    trigger_reason = trigger_reason(post_ci_triggered, pre_ci_minor_triggered, pre_ci_triggered)
    preconditions_met = post_ci_triggered || pre_ci_triggered
    log_transaction(patient_case, transaction_id, "sms_trigger_preconditions", %{
      case_eligible_for_sms: case_eligible_for_sms?(patient_case),
      case_eligible_for_pre_ci: case_eligible_for_pre_ci?(patient_case),
      case_is_manually_triggered: case_is_manually_triggered?(patient_case),
      trigger_reason: trigger_reason,
      preconditions_met: preconditions_met
    })
    {preconditions_met, trigger_reason}
  end

  # Post-CI: the SMS flag is on and the case was manually opted in.
  defp post_ci_triggered?(patient_case) do
    case_eligible_for_sms?(patient_case) && case_is_manually_triggered?(patient_case)
  end

  # Pre-CI: both flags are on and the case was NOT manually opted in.
  defp pre_ci_triggered?(patient_case) do
    case_eligible_for_sms?(patient_case) && case_eligible_for_pre_ci?(patient_case) &&
      !case_is_manually_triggered?(patient_case)
  end

  defp pre_ci_minor_triggered?(patient_case) do
    pre_ci_triggered?(patient_case) && PatientCase.is_minor?(patient_case)
  end

  # Arguments: (post_ci?, pre_ci_minor?, pre_ci?). Manual opt-in wins.
  defp trigger_reason(true, _, _), do: :post_ci
  defp trigger_reason(false, true, _), do: :pre_ci_minor
  defp trigger_reason(false, false, true), do: :pre_ci
  defp trigger_reason(_, _, _), do: nil

  def minimum_age, do: @minimum_age

  defp case_eligible_for_sms?(patient_case) do
    FunWithFlags.enabled?(@sms_trigger_feature_flag, for: PatientCaseWrapper.new(patient_case))
  end

  defp case_eligible_for_pre_ci?(patient_case) do
    FunWithFlags.enabled?(@pre_ci_feature_flag, for: PatientCaseWrapper.new(patient_case))
  end

  defp case_is_manually_triggered?(patient_case) do
    PatientCase.smc_opt_in?(patient_case)
  end

  # ---- Per-case condition checks (all must pass) --------------------------

  defp acceptable_current_status?(patient_case) do
    PatientCase.current_status(patient_case) != "closed"
  end

  defp acceptable_patient_type?(patient_case) do
    PatientCase.patient_type(patient_case) != "pui"
  end

  defp acceptable_stub?(patient_case) do
    !PatientCase.is_stub?(patient_case)
  end

  defp acceptable_transfer_status?(patient_case) do
    PatientCase.transfer_status(patient_case) not in ["pending", "sent"]
  end

  # A case with a secure_id already set has presumably been triggered before.
  defp absent_secure_id?(patient_case) do
    patient_case
    |> PatientCase.secure_id()
    |> Euclid.Exists.blank?()
  end

  # Runs every check against the case, logging each individual result and the
  # aggregate. The first two checks vary with the trigger reason.
  def case_meets_conditions?(patient_case, transaction_id, trigger_reason) do
    [
      acceptable_case_age?(trigger_reason),
      acceptable_interview_disposition?(trigger_reason),
      &absent_secure_id?/1,
      &acceptable_current_status?/1,
      &acceptable_patient_type?/1,
      &acceptable_stub?/1,
      &acceptable_transfer_status?/1,
      &PatientCase.has_date_of_birth?/1,
      &PatientCase.has_phone_number?/1
    ]
    |> Enum.all?(fn check -> check_property(check, patient_case, transaction_id) end)
    |> log_result_of_checks(patient_case, transaction_id)
  end

  # Case age only limits pre-CI sends; post-CI (manual) sends always pass.
  # An :error from days_between_open_and_modified is treated as acceptable.
  defp acceptable_case_age?(:post_ci), do: fn _ -> true end
  defp acceptable_case_age?(_trigger_reason) do
    fn patient_case ->
      PatientCase.days_between_open_and_modified(patient_case)
      |> case do
        :error -> true
        days -> days < @maximum_pre_ci_case_age
      end
    end
  end

  # Pre-CI sends are skipped once an interview was attempted or completed.
  defp acceptable_interview_disposition?(:post_ci), do: fn _ -> true end
  defp acceptable_interview_disposition?(_trigger_reason) do
    fn patient_case ->
      !PatientCase.interview_attempted_or_completed?(patient_case)
    end
  end

  defp log_result_of_checks(result, patient_case, transaction_id) do
    log_transaction(patient_case, transaction_id, "sms_trigger_all_checks", %{result: result})
    result
  end

  # Invokes one check and logs its function name and boolean outcome.
  defp check_property(check, patient_case, transaction_id) do
    result = check.(patient_case)
    # if FunWithFlags.enabled?(:commcare_secure_id_check_logging) do
    check_info = Function.info(check)
    check_name = Keyword.get(check_info, :name)
    log_transaction(patient_case, transaction_id, "sms_trigger_check_property", %{
      check_name: check_name,
      result: result
    })
    # end
    result
  end

  # Emits a structured log line tagged with case id, domain and transaction.
  defp log_transaction(patient_case, transaction_id, message, metadata \\ %{}) do
    metainfo = %{
      module: __MODULE__,
      commcare_case_id: PatientCase.case_id(patient_case),
      commcare_domain: PatientCase.domain(patient_case),
      transaction: transaction_id
    }
    Logger.info(message, Map.merge(metadata, metainfo))
  end

  # Collaborators are injected via application config — presumably swapped
  # for test doubles; verify in config/.
  defp analytics_reporter, do: Application.get_env(:epi_contacts, :analytics_reporter)
  defp commcare_client, do: Application.get_env(:epi_contacts, :commcare_client)
end
| 31.397112 | 110 | 0.726227 |
ff6053a21f2dfea6638b024a148fe637934b964d | 983 | ex | Elixir | lib/shopify_api/bulk/telemetry.ex | tres/elixir-shopifyapi | 57f379fca062126c8752fcba226f34807370403f | [
"Apache-2.0"
] | 18 | 2019-06-07T13:36:39.000Z | 2021-08-03T21:06:36.000Z | lib/shopify_api/bulk/telemetry.ex | tres/elixir-shopifyapi | 57f379fca062126c8752fcba226f34807370403f | [
"Apache-2.0"
] | 158 | 2018-08-30T22:09:00.000Z | 2021-09-22T01:18:59.000Z | lib/shopify_api/bulk/telemetry.ex | tres/elixir-shopifyapi | 57f379fca062126c8752fcba226f34807370403f | [
"Apache-2.0"
] | 4 | 2020-09-05T00:48:46.000Z | 2020-09-30T15:53:50.000Z | defmodule ShopifyAPI.Bulk.Telemetry do
  @moduledoc """
  Emits `:telemetry` events for bulk-operation outcomes.

  Events are published under `[:shopify_api, :bulk_operation, :success | :failure]`
  with a count measurement of 1 and metadata identifying the app, shop and caller.
  """

  @measurements %{count: 1}

  @doc """
  Reports the outcome of a bulk operation for the given token's app/shop.

  `data` is either `{:error, type, reason}` (emitted as a `:failure` event,
  tagged with `bulk_id`) or `{:success, type}` (emitted as a `:success` event).
  """
  def send(module_name, token, data, bulk_id \\ nil)

  def send(module_name, %{app_name: app, shop_name: shop}, {:error, type, reason}, bulk_id) do
    %{
      app: app,
      shop: shop,
      module: module_name,
      bulk_id: bulk_id,
      type: type,
      reason: reason
    }
    |> emit(:failure)
  end

  def send(module_name, %{app_name: app, shop_name: shop}, {:success, type}, _bulk_id) do
    %{app: app, shop: shop, module: module_name, type: type}
    |> emit(:success)
  end

  # Publishes a single event under the bulk_operation prefix.
  defp emit(metadata, event_status) do
    :telemetry.execute([:shopify_api, :bulk_operation, event_status], @measurements, metadata)
  end
end
| 19.66 | 53 | 0.582909 |
ff605b4cd28eaea3b4075659e9004fcad515bfc3 | 458 | ex | Elixir | todo/lib/todo_web/live/item_live/show.ex | IITA-DMCS-2122/iita_elixir | e636360edd96f0bfcde91eec212a467f10186437 | [
"MIT"
] | null | null | null | todo/lib/todo_web/live/item_live/show.ex | IITA-DMCS-2122/iita_elixir | e636360edd96f0bfcde91eec212a467f10186437 | [
"MIT"
] | 22 | 2021-12-11T13:12:45.000Z | 2022-01-29T14:20:03.000Z | todo/lib/todo_web/live/item_live/show.ex | IITA-DMCS-2122/iita_elixir | e636360edd96f0bfcde91eec212a467f10186437 | [
"MIT"
] | null | null | null | defmodule TodoWeb.ItemLive.Show do
  use TodoWeb, :live_view

  alias Todo.Todos

  @impl true
  def mount(_params, _session, socket), do: {:ok, socket}

  # Loads the requested item and sets the page title whenever the URL params
  # change. `live_action` is expected to be :show or :edit.
  @impl true
  def handle_params(%{"id" => id}, _uri, socket) do
    title = page_title(socket.assigns.live_action)
    item = Todos.get_item!(id)

    {:noreply, socket |> assign(:page_title, title) |> assign(:item, item)}
  end

  defp page_title(:show), do: "Show Item"
  defp page_title(:edit), do: "Edit Item"
end
| 20.818182 | 67 | 0.655022 |
ff605e2048f5ae99be291512e145b5c76bd586ac | 1,022 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/user_info.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/user_info.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/flex_bar/tab/user_info.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChatWeb.FlexBar.Tab.UserInfo do
  use UccChatWeb.FlexBar.Helpers

  alias UccChat.{Channel, Direct}
  alias UcxUcc.{Accounts, TabBar.Tab}

  # Registers the "User Info" flex-bar tab. Positional Tab.new/9 arguments
  # appear to be: module, channel types (direct only), id, title, icon,
  # view module, template, display order, and model/loader options —
  # verify against UcxUcc.TabBar.Tab.
  def add_buttons do
    TabBar.add_button Tab.new(
      __MODULE__,
      ~w[direct],
      "user-info",
      ~g"User Info",
      "icon-user",
      View,
      "user_card.html",
      30,
      [
        model: Accounts.User,
        get: {Accounts, :get_user, [[preload: [phone_numbers: [:label]]]]},
        prefix: "user"
      ]
    )
  end

  # Builds the assigns for rendering the tab: the current user, the direct
  # channel, and the "friend" (the other party of the DM) with account and
  # role data preloaded.
  def args(socket, {user_id, channel_id, _, _}, _) do
    current_user = Helpers.get_user! user_id
    channel = Channel.get!(channel_id)
    direct = Direct.get_by user_id: user_id, channel_id: channel_id, preload: [friend: [:account, :roles, user_roles: [:role]]]
    # user = Helpers.get_user_by_name(direct.users)
    user_info = user_info(channel, direct: true)
    {[
      user: direct.friend,
      current_user: current_user,
      channel_id: channel_id,
      user_info: user_info
    ], socket}
  end
end
| 24.926829 | 127 | 0.62818 |
ff6078f2a9812f3dd93f06d833950ac9c9c88d66 | 834 | exs | Elixir | projects/api/priv/repo/migrations/20171203172447_add_users_table.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/priv/repo/migrations/20171203172447_add_users_table.exs | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/priv/repo/migrations/20171203172447_add_users_table.exs | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z | defmodule Margaret.Repo.Migrations.AddUsersTable do
  @moduledoc false

  use Ecto.Migration

  @doc false
  def change do
    create table(:users) do
      # Bounded varchar columns: usernames up to 64 chars, emails up to 254
      # (254 is the conventional maximum length of an email address).
      add(:username, :string, size: 64, null: false)
      add(:email, :string, size: 254, null: false)
      # Presumably holds an address awaiting verification; nullable.
      add(:unverified_email, :string, size: 254)
      add(:first_name, :string)
      add(:last_name, :string)
      add(:avatar, :string)
      add(:bio, :string)
      add(:website, :string)
      add(:location, :string)
      add(:is_employee, :boolean, default: false, null: false)
      add(:is_admin, :boolean, default: false, null: false)
      # nil presumably means the account is active.
      add(:deactivated_at, :naive_datetime, default: nil)
      add(:settings, :map, null: false)
      timestamps()
    end

    # Usernames and emails must be unique across all users.
    create(unique_index(:users, [:username]))
    create(unique_index(:users, [:email]))
  end
end
| 23.166667 | 62 | 0.631894 |
ff607c4cbea894c01bbe67dc568b170e43347e7b | 851 | ex | Elixir | lib/one_word/command/start_story.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | lib/one_word/command/start_story.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | lib/one_word/command/start_story.ex | Bentheburrito/onewordstorybot | 2d75f3a59a952d141cfa4306fb14c902815cbff5 | [
"MIT"
] | null | null | null | defmodule OneWord.Command.StartStory do
  @moduledoc """
  Chat command that manually starts a pending one-word story in the current
  channel. Only the user who initiated the story may start it.
  """

  @behaviour OneWord.Command

  alias Nostrum.Api

  require Logger

  @impl OneWord.Command
  # Looks up the channel's game and asks it to start, replying in-channel
  # with the outcome. The `else` atoms come from get_game/1 and
  # start_game/2 — presumably :error from the former and
  # :already_started/:no_auth from the latter; verify in those modules.
  def run(%{channel_id: channel_id} = message, _args) do
    with {:ok, game} <- OneWord.GameHandler.get_game(channel_id),
         :ok <- OneWord.Game.start_game(game.pid, message.author.id) do
      Api.create_message(channel_id, "Game Started")
    else
      :already_started ->
        Api.create_message(channel_id, "A story is already being told.")

      :no_auth ->
        # Fixed user-facing typo: "manualy" -> "manually".
        Api.create_message(
          channel_id,
          "Only the story-initiator (the user that typed !story) can manually start the story."
        )

      :error ->
        Api.create_message(channel_id, "No game in this channel.")
    end
  end

  @impl OneWord.Command
  def help(message) do
    Api.create_message(message.channel_id, "Manually starts the story in the current channel. Usage: !startstory")
  end
end
| 32.730769 | 132 | 0.741481 |
ff6084df2e4c6e65954e3f8ea4a25fa99081844a | 565 | ex | Elixir | lib/ccb_api_ex/model/family.ex | msawka/ccb_api_ex | 5a63eece2218cbe91740b9dea50f50a5036f081f | [
"Apache-2.0"
] | null | null | null | lib/ccb_api_ex/model/family.ex | msawka/ccb_api_ex | 5a63eece2218cbe91740b9dea50f50a5036f081f | [
"Apache-2.0"
] | null | null | null | lib/ccb_api_ex/model/family.ex | msawka/ccb_api_ex | 5a63eece2218cbe91740b9dea50f50a5036f081f | [
"Apache-2.0"
] | null | null | null | defmodule CcbApiEx.Model.Family do
  @moduledoc """
  Struct for a CCB family record plus constructors that build it (and its
  nested individuals) from raw attribute maps.
  """

  alias CcbApiEx.Model.Individual

  defstruct id: nil,
            modified: nil,
            individuals: nil

  @doc """
  Builds a list of `%CcbApiEx.Model.Family{}` structs from a list of
  attribute maps, preserving order.
  """
  def from_list(maps) do
    # Enum.map replaces the previous reduce with `result ++ [from(map)]`,
    # which re-walked the accumulator on every step (O(n^2)).
    Enum.map(maps, &from/1)
  end

  @doc """
  Builds a single `%CcbApiEx.Model.Family{}` from an attribute map.

  `:modified` is parsed as an ISO 8601 naive datetime when present; `nil`
  passes through unchanged. An unparsable value raises `MatchError`, same
  as the original implementation.
  """
  def from(map) do
    {:ok, modified} =
      if map[:modified] != nil do
        NaiveDateTime.from_iso8601("#{map[:modified]}")
      else
        {:ok, map[:modified]}
      end

    %CcbApiEx.Model.Family{
      id: map[:id],
      modified: modified,
      individuals: Individual.from_list(map[:individuals])
    }
  end
end | 20.178571 | 58 | 0.614159 |
ff6085e92a54667f002f3524a51053b978638582 | 1,866 | ex | Elixir | lib/akin/algorithms/tversky.ex | vanessaklee/akin | fc97befb871536b6c7ef0b35740fe6efc032c566 | [
"Apache-2.0"
] | 8 | 2021-10-31T21:24:37.000Z | 2022-02-27T05:30:24.000Z | lib/akin/algorithms/tversky.ex | vanessaklee/akin | fc97befb871536b6c7ef0b35740fe6efc032c566 | [
"Apache-2.0"
] | 1 | 2021-10-31T17:57:41.000Z | 2021-11-09T22:54:50.000Z | lib/akin/algorithms/tversky.ex | vanessaklee/akin | fc97befb871536b6c7ef0b35740fe6efc032c566 | [
"Apache-2.0"
] | null | null | null | defmodule Akin.Tversky do
  @moduledoc """
  Functions to calculate the Tversky index
  between two strings.
  """
  @behaviour Akin.Task

  import Akin.Util, only: [ngram_tokenize: 2, opts: 2, intersect: 2]

  alias Akin.Corpus

  # With alpha = beta = 1, the Tversky index reduces to the Jaccard index
  # over the n-gram sets.
  @default_alpha 1
  @default_beta 1

  @doc """
  Calculates the Tversky index between two strings. Default alpha is 1
  and beta is 1. `ngram_size` (a positive integer greater than 0, read
  from `opts`) controls how the strings are tokenized.

  ## Examples

      iex> Akin.Tversky.compare(%Akin.Corpus{string: "contact"}, %Akin.Corpus{string: "context"}, [ngram_size: 4])
      0.14285714285714285

      iex> Akin.Tversky.compare(%Akin.Corpus{string: "contact"}, %Akin.Corpus{string: "context"})
      0.3333333333333333

      iex> Akin.Tversky.compare(%Akin.Corpus{string: "contact"}, %Akin.Corpus{string: "context"}, [ngram_size: 1])
      0.5555555555555556
  """
  @spec compare(%Corpus{}, %Corpus{}, Keyword.t()) :: float()
  def compare(%Corpus{} = left, %Corpus{} = right, opts \\ []) do
    perform(left, right, opts(opts, :ngram_size))
  end

  # Degenerate sizes: a non-positive ngram size, or either string shorter
  # than one n-gram, cannot be tokenized — treated as no similarity.
  # Note: this clause precedes the equal-strings clause, so identical
  # strings shorter than `n` also return 0.0 (original behavior).
  # Fixed: this spec was previously (mis)declared for compare/3, duplicating
  # the public spec above; it describes perform/3.
  @spec perform(%Corpus{}, %Corpus{}, integer()) :: float()
  defp perform(%Corpus{string: left}, %Corpus{string: right}, n)
       when n <= 0 or byte_size(left) < n or byte_size(right) < n do
    0.0
  end

  # Identical strings are a perfect match regardless of tokenization.
  defp perform(%Corpus{string: left}, %Corpus{string: right}, _n) when left == right, do: 1.0

  defp perform(%Corpus{string: left}, %Corpus{string: right}, ngram_size)
       when is_integer(ngram_size) do
    left_ngrams = left |> ngram_tokenize(ngram_size)
    right_ngrams = right |> ngram_tokenize(ngram_size)
    nmatches = intersect(left_ngrams, right_ngrams) |> length
    left_diff_length = (left_ngrams -- right_ngrams) |> length
    right_diff_length = (right_ngrams -- left_ngrams) |> length
    nmatches / (@default_alpha * left_diff_length + @default_beta * right_diff_length + nmatches)
  end
end
| 35.884615 | 112 | 0.687567 |
ff60a6f4fa3d72365a19843f1e212e4b24b97654 | 3,493 | exs | Elixir | .iex.exs | phishx-docs/transform_map | 7dfb9787eb41bbeebcedffb7ede434a414538a1b | [
"MIT"
] | 3 | 2018-07-10T11:01:28.000Z | 2020-02-27T21:32:47.000Z | .iex.exs | phishx-docs/transform_map | 7dfb9787eb41bbeebcedffb7ede434a414538a1b | [
"MIT"
] | null | null | null | .iex.exs | phishx-docs/transform_map | 7dfb9787eb41bbeebcedffb7ede434a414538a1b | [
"MIT"
] | 1 | 2020-09-23T03:28:00.000Z | 2020-09-23T03:28:00.000Z | m = [%{
  # Sample geo-IP lookup record (nested maps and lists, MaxMind-style keys)
  # used to exercise TransformMap in IEx.
  "_ip" => "104.41.40.73",
  "country" => %{
    "alpha3Code" => "BRA",
    "callingCodes" => ["55"],
    "capital" => "Brasília",
    "currencies" => [
      %{"code" => "BRL", "name" => "Brazilian real", "symbol" => "R$"},
      %{"code" => "BRL", "name" => "Brazilian real", "symbol" => "R$"}
    ],
    "flag" => "https://restcountries.eu/data/bra.svg",
    "languages" => [
      %{
        "iso639_1" => "pt",
        "iso639_2" => "por",
        "name" => "Portuguese",
        "nativeName" => "Português"
      }
    ],
    "latlng" => [-10.0, -55.0],
    "name" => "Brazil",
    "nativeName" => "Brasil",
    "region" => "Americas",
    "regionalBlocs" => [
      %{
        "acronym" => "USAN",
        "name" => "Union of South American Nations",
        "otherAcronyms" => ["UNASUR", "UNASUL", "UZAN"],
        "otherNames" => ["Unión de Naciones Suramericanas",
         "União de Nações Sul-Americanas",
         "Unie van Zuid-Amerikaanse Naties", "South American Union"]
      }
    ],
    "subregion" => "South America",
    "timezones" => ["UTC-05:00", "UTC-04:00", "UTC-03:00", "UTC-02:00"],
    "topLevelDomain" => [".br"]
  },
  "data" => %{
    "city" => %{
      "geoname_id" => 3467865,
      "names" => %{
        "de" => "Campinas",
        "en" => "Campinas",
        "es" => "Campinas",
        "fr" => "Campinas",
        "ja" => "カンピーナス",
        "pt-BR" => "Campinas",
        "ru" => "Кампинас",
        "zh-CN" => "坎皮纳斯"
      }
    },
    "continent" => %{
      "code" => "SA",
      "geoname_id" => 6255150,
      "names" => %{
        "de" => "Südamerika",
        "en" => "South America",
        "es" => "Sudamérica",
        "fr" => "Amérique du Sud",
        "ja" => "南アメリカ",
        "pt-BR" => "América do Sul",
        "ru" => "Южная Америка",
        "zh-CN" => "南美洲"
      }
    },
    "country" => %{
      "geoname_id" => 3469034,
      "iso_code" => "BR",
      "names" => %{
        "de" => "Brasilien",
        "en" => "Brazil",
        "es" => "Brasil",
        "fr" => "Brésil",
        "ja" => "ブラジル連邦共和国",
        "pt-BR" => "Brasil",
        "ru" => "Бразилия",
        "zh-CN" => "巴西"
      }
    },
    "location" => %{
      "accuracy_radius" => 1000,
      "latitude" => -22.9095,
      "longitude" => -47.0674,
      "time_zone" => "America/Sao_Paulo"
    },
    "registered_country" => %{
      "geoname_id" => 6252001,
      "iso_code" => "US",
      "names" => %{
        "de" => "USA",
        "en" => "United States",
        "es" => "Estados Unidos",
        "fr" => "États-Unis",
        "ja" => "アメリカ合衆国",
        "pt-BR" => "Estados Unidos",
        "ru" => "США",
        "zh-CN" => "美国"
      }
    },
    "subdivisions" => [
      %{
        "geoname_id" => 3448433,
        "iso_code" => "SP",
        "names" => %{
          "en" => "Sao Paulo",
          "es" => "São Paulo",
          "pt-BR" => "São Paulo"
        }
      },
      %{
        "geoname_id" => 3448433,
        "iso_code" => "SP",
        "names" => %{
          "en" => "Sao Paulo",
          "es" => "São Paulo",
          "pt-BR" => "São Paulo"
        }
      }
    ],
    "traits" => %{
      "autonomous_system_number" => 8075,
      "autonomous_system_organization" => "Microsoft Corporation",
      "ip_address" => "104.41.40.73",
      "isp" => "Microsoft Corporation",
      "organization" => "Microsoft Azure"
    }
  },
  "quantity" => 8245,
  "updated_at" => "2018-04-27 18:51:08.671029"
}]
# Presumably flattens/shrinks the nested structure into key paths — verify
# against TransformMap.multiple_shrink/1 docs.
s = m |> TransformMap.multiple_shrink() | 26.664122 | 72 | 0.423705 |
ff60b16a2e16ac0b3133db9e072515b327cd1a45 | 5,170 | exs | Elixir | elixir/high-score/test/high_score_test.exs | hoangbits/exercism | 11a527d63526e07b1eba114ebeb2fddca8f9419c | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | elixir/high-score/test/high_score_test.exs | hoangbits/exercism | 11a527d63526e07b1eba114ebeb2fddca8f9419c | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | elixir/high-score/test/high_score_test.exs | hoangbits/exercism | 11a527d63526e07b1eba114ebeb2fddca8f9419c | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule HighScoreTest do
  use ExUnit.Case

  # Exercises the HighScore exercise module (defined elsewhere): a plain map
  # of player name => score with add/remove/reset/update/list operations.
  # Trivia: Scores used in this test suite are based on lines of code
  # added to the elixir-lang/elixir github repository as of Apr 27, 2020.

  @tag task_id: 1
  test "new/1 result in empty score map" do
    assert HighScore.new() == %{}
  end

  describe "add_player/2" do
    @tag task_id: 2
    test "add player without score to empty score map" do
      # Omitting the score defaults the new player to 0.
      scores = HighScore.new()

      assert HighScore.add_player(scores, "José Valim") == %{"José Valim" => 0}
    end

    @tag task_id: 2
    test "add two players without score to empty map" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.add_player("Chris McCord")

      assert scores == %{"Chris McCord" => 0, "José Valim" => 0}
    end

    @tag task_id: 2
    test "add player with score to empty score map" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim", 486_373)

      assert scores == %{"José Valim" => 486_373}
    end

    @tag task_id: 2
    test "add players with scores to empty score map" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim", 486_373)
        |> HighScore.add_player("Dave Thomas", 2_374)

      assert scores == %{"José Valim" => 486_373, "Dave Thomas" => 2_374}
    end
  end

  describe "remove_player/2" do
    @tag task_id: 3
    test "remove from empty score map results in empty score map" do
      # Removing an unknown player is a no-op rather than an error.
      scores =
        HighScore.new()
        |> HighScore.remove_player("José Valim")

      assert scores == %{}
    end

    @tag task_id: 3
    test "remove player after adding results in empty score map" do
      map =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.remove_player("José Valim")

      assert map == %{}
    end

    @tag task_id: 3
    test "remove first player after adding two results in map with remaining player" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.add_player("Chris McCord")
        |> HighScore.remove_player("José Valim")

      assert scores == %{"Chris McCord" => 0}
    end

    @tag task_id: 3
    test "remove second player after adding two results in map with remaining player" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.add_player("Chris McCord")
        |> HighScore.remove_player("Chris McCord")

      assert scores == %{"José Valim" => 0}
    end
  end

  describe "reset_score/2" do
    @tag task_id: 4
    test "resetting score for non-existent player sets player score to 0" do
      # Resetting an unknown player creates the entry at 0.
      scores =
        HighScore.new()
        |> HighScore.reset_score("José Valim")

      assert scores == %{"José Valim" => 0}
    end

    @tag task_id: 4
    test "resetting score for existing player sets previous player score to 0" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim", 486_373)
        |> HighScore.reset_score("José Valim")

      assert scores == %{"José Valim" => 0}
    end
  end

  describe "update_score/3" do
    @tag task_id: 5
    test "update score for non existent player initializes value" do
      scores =
        HighScore.new()
        |> HighScore.update_score("José Valim", 486_373)

      assert scores == %{"José Valim" => 486_373}
    end

    @tag task_id: 5
    test "update score for existing player adds score to previous" do
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.update_score("José Valim", 486_373)

      assert scores == %{"José Valim" => 486_373}
    end

    @tag task_id: 5
    test "update score for existing player with non-zero score adds score to previous" do
      # update_score/3 is cumulative: 1 + 486_373.
      scores =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.update_score("José Valim", 1)
        |> HighScore.update_score("José Valim", 486_373)

      assert scores == %{"José Valim" => 486_374}
    end
  end

  describe "get_players/1" do
    @tag task_id: 6
    test "empty score map gives empty list" do
      scores_by_player =
        HighScore.new()
        |> HighScore.get_players()

      assert scores_by_player == []
    end

    @tag task_id: 6
    test "score map with one entry gives one result" do
      players =
        HighScore.new()
        |> HighScore.add_player("José Valim")
        |> HighScore.update_score("José Valim", 486_373)
        |> HighScore.get_players()

      assert players == ["José Valim"]
    end

    @tag task_id: 6
    test "score map with multiple entries gives results in unknown order" do
      # get_players/1 makes no ordering guarantee, so sort before comparing.
      players =
        HighScore.new()
        |> HighScore.add_player("José Valim", 486_373)
        |> HighScore.add_player("Dave Thomas", 2_374)
        |> HighScore.add_player("Chris McCord", 0)
        |> HighScore.add_player("Saša Jurić", 762)
        |> HighScore.get_players()
        |> Enum.sort()

      assert players == [
               "Chris McCord",
               "Dave Thomas",
               "José Valim",
               "Saša Jurić"
             ]
    end
  end
end
| 27.647059 | 89 | 0.603675 |
ff60f53c9d5d7039d88a10ff86127ce88af0fb26 | 260 | exs | Elixir | test/test_helper.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | # For tasks/generators testing
Mix.start()
Mix.shell(Mix.Shell.Process)
System.put_env("ECTO_EDITOR", "")
Logger.configure(level: :info)
# Commonly used support feature
Code.require_file "../integration_test/support/file_helpers.exs", __DIR__
ExUnit.start()
| 23.636364 | 73 | 0.776923 |
ff612e4a8cb277a4c3f0d22f855dec711c54c94a | 264 | exs | Elixir | lib/elixir/test/elixir/node_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/elixir/test/elixir/node_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/node_test.exs | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2020-12-07T08:04:16.000Z | 2020-12-07T08:04:16.000Z | Code.require_file "test_helper.exs", __DIR__
defmodule NodeTest do
use ExUnit.Case
test "start/3 and stop/0" do
assert Node.stop == {:error, :not_found}
assert {:ok, _} = Node.start(:hello, :shortnames, 15000)
assert Node.stop() == :ok
end
end
| 22 | 60 | 0.670455 |
ff61384309f481826a31745360e89de631701048 | 1,717 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/error_handler.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/error_handler.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/error_handler.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.ErrorHandler do
@moduledoc """
Custom static error page to be served when an error occurs.
## Attributes
- errorCode (String): Error condition this handler applies to. Defaults to: `null`.
- Enum - one of [ERROR_CODE_UNSPECIFIED, ERROR_CODE_DEFAULT, ERROR_CODE_OVER_QUOTA, ERROR_CODE_DOS_API_DENIAL, ERROR_CODE_TIMEOUT]
- mimeType (String): MIME type of file. Defaults to text/html. Defaults to: `null`.
- staticFile (String): Static file content to be served for this error. Defaults to: `null`.
"""
defstruct [
:"errorCode",
:"mimeType",
:"staticFile"
]
end
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.ErrorHandler do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.ErrorHandler do
def encode(value, options) do
GoogleApi.AppEngine.V1.Deserializer.serialize_non_nil(value, options)
end
end
| 33.666667 | 134 | 0.750146 |
ff6176d9cb702900104fa05c60050fcb52c32153 | 2,122 | exs | Elixir | config/dev.exs | hassanshaikley/ascii-quest | 8b3b3af3b9c6c59b8155fe2e6cb2a794033c29f1 | [
"MIT"
] | 8 | 2019-01-15T09:31:05.000Z | 2020-04-20T13:59:01.000Z | config/dev.exs | hassanshaikley/ascii-quest | 8b3b3af3b9c6c59b8155fe2e6cb2a794033c29f1 | [
"MIT"
] | 2 | 2019-01-06T02:53:44.000Z | 2019-05-30T20:30:20.000Z | config/dev.exs | hassanshaikley/ascii-quest | 8b3b3af3b9c6c59b8155fe2e6cb2a794033c29f1 | [
"MIT"
] | 1 | 2019-01-04T04:37:21.000Z | 2019-01-04T04:37:21.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :stabby_flies, StabbyFliesWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# Configure your database
config :stabby_flies, StabbyFlies.Repo,
username: "postgres",
password: "postgres",
database: "stabby_flies_Dev",
hostname: "localhost",
pool_size: 10
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :stabby_flies, StabbyFliesWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/stabby_flies_web/views/.*(ex)$},
~r{lib/stabby_flies_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 27.921053 | 68 | 0.692743 |
ff61a1e0430d3d493ebaf3bda30c5bd2624e901c | 11,243 | exs | Elixir | test/protobuf/wire_test.exs | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 419 | 2017-04-02T13:10:51.000Z | 2020-11-15T15:53:17.000Z | test/protobuf/wire_test.exs | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 101 | 2020-11-22T20:20:11.000Z | 2022-03-06T16:09:26.000Z | test/protobuf/wire_test.exs | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 83 | 2017-07-24T21:50:04.000Z | 2020-11-15T08:52:34.000Z | defmodule Protobuf.WireTest do
use ExUnit.Case, async: true
alias Protobuf.Wire
describe "encode/2" do
test "varint" do
assert encode(:int32, 42) == <<42>>
end
test "min int32" do
assert encode(:int32, -2_147_483_648) ==
<<128, 128, 128, 128, 248, 255, 255, 255, 255, 1>>
end
test "min int64" do
assert encode(:int64, -9_223_372_036_854_775_808) ==
<<128, 128, 128, 128, 128, 128, 128, 128, 128, 1>>
end
test "min sint32" do
assert encode(:sint32, -2_147_483_648) == <<255, 255, 255, 255, 15>>
end
test "max sint32" do
assert encode(:sint32, 2_147_483_647) == <<254, 255, 255, 255, 15>>
end
test "min sint64" do
assert encode(:sint64, -9_223_372_036_854_775_808) ==
<<255, 255, 255, 255, 255, 255, 255, 255, 255, 1>>
end
test "max sint64" do
assert encode(:sint64, 9_223_372_036_854_775_807) ==
<<254, 255, 255, 255, 255, 255, 255, 255, 255, 1>>
end
test "bool false" do
assert encode(:bool, false) == <<0>>
end
test "bool true" do
assert encode(:bool, true) == <<1>>
end
test "enum atom and alias" do
assert encode({:enum, TestMsg.EnumFoo}, :C) == <<4>>
assert encode({:enum, TestMsg.EnumFoo}, :D) == <<4>>
end
test "enum known and unknown integer" do
assert encode({:enum, TestMsg.EnumFoo}, 1) == <<1>>
assert encode({:enum, TestMsg.EnumFoo}, 5) == <<5>>
end
test "wrong enum" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode({:enum, TestMsg.EnumFoo}, "invalid")
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode({:enum, TestMsg.EnumFoo}, 0.1)
end
end
test "a fixed64" do
assert encode(:fixed64, 8_446_744_073_709_551_615) ==
<<255, 255, 23, 118, 251, 220, 56, 117>>
end
test "max fixed64" do
assert encode(:fixed64, 18_446_744_073_709_551_615) ==
<<255, 255, 255, 255, 255, 255, 255, 255>>
end
test "min sfixed64" do
assert encode(:sfixed64, -9_223_372_036_854_775_808) ==
<<0, 0, 0, 0, 0, 0, 0, 128>>
end
test "max sfixed64" do
assert encode(:sfixed64, 9_223_372_036_854_775_807) ==
<<255, 255, 255, 255, 255, 255, 255, 127>>
end
test "min double" do
assert encode(:double, 5.0e-324) == <<1, 0, 0, 0, 0, 0, 0, 0>>
end
test "max double" do
assert encode(:double, 1.7976931348623157e308) == <<255, 255, 255, 255, 255, 255, 239, 127>>
end
test "int as double" do
assert encode(:double, -9_223_372_036_854_775_808) == <<0, 0, 0, 0, 0, 0, 224, 195>>
end
test "string" do
assert encode(:string, "testing") == <<7, 116, 101, 115, 116, 105, 110, 103>>
end
test "bytes" do
assert encode(:bytes, <<42, 43, 44, 45>>) == <<4, 42, 43, 44, 45>>
end
test "fixed32" do
assert encode(:fixed32, 4_294_967_295) == <<255, 255, 255, 255>>
end
test "sfixed32" do
assert encode(:sfixed32, 2_147_483_647) == <<255, 255, 255, 127>>
end
test "float" do
assert encode(:float, 3.4028234663852886e38) == <<255, 255, 127, 127>>
end
test "int as float" do
assert encode(:float, 3) == <<0, 0, 64, 64>>
end
test "float infinity/-infinity/nan" do
Enum.each([:infinity, :negative_infinity, :nan], fn f ->
bin = encode(:float, f)
assert f == Wire.decode(:float, bin)
end)
end
test "double infinity, -infinity, nan" do
Enum.each([:infinity, :negative_infinity, :nan], fn f ->
bin = encode(:double, f)
assert f == Wire.decode(:double, bin)
end)
end
test "wrong uint32" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:uint32, 12_345_678_901_234_567_890)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:uint32, -1)
end
end
test "wrong uint64" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:uint64, 184_467_440_737_095_516_150)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:uint64, -1)
end
end
test "wrong fixed32" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:fixed32, 12_345_678_901_234_567_890)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:fixed32, -1)
end
end
test "wrong fixed64" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:fixed64, 184_467_440_737_095_516_150)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:fixed64, -1)
end
end
test "wrong int32" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:int32, 2_147_483_648)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:int32, -2_147_483_649)
end
end
test "wrong int64" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:int64, 184_467_440_737_095_516_150)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:int64, -184_467_440_737_095_516_150)
end
end
test "wrong sint32" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sint32, 2_147_483_648)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sint32, -2_147_483_649)
end
end
test "wrong sint64" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sint64, 184_467_440_737_095_516_150)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sint64, -184_467_440_737_095_516_150)
end
end
test "wrong sfixed32" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sfixed32, 2_147_483_648)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sfixed32, -2_147_483_649)
end
end
test "wrong sfixed64" do
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sfixed64, 184_467_440_737_095_516_150)
end
assert_raise Protobuf.TypeEncodeError, fn ->
encode(:sfixed64, -184_467_440_737_095_516_150)
end
end
defp encode(type, value) do
type
|> Wire.encode(value)
|> IO.iodata_to_binary()
end
end
describe "decode/2" do
test "varint" do
assert 42 == Wire.decode(:int32, 42)
end
test "int64" do
assert -1 == Wire.decode(:int64, -1)
end
test "min int32" do
assert -2_147_483_648 == Wire.decode(:int32, 18_446_744_071_562_067_968)
end
test "max int32" do
assert -2_147_483_647 == Wire.decode(:int32, 18_446_744_071_562_067_969)
end
test "min int64" do
assert -9_223_372_036_854_775_808 == Wire.decode(:int64, 9_223_372_036_854_775_808)
end
test "max int64" do
assert 9_223_372_036_854_775_807 == Wire.decode(:int64, 9_223_372_036_854_775_807)
end
test "min sint32" do
assert -2_147_483_648 == Wire.decode(:sint32, 4_294_967_295)
end
test "max sint32" do
assert 2_147_483_647 == Wire.decode(:sint32, 4_294_967_294)
end
test "overflowing sint32" do
assert Wire.decode(:sint32, Wire.Zigzag.encode(2_147_483_647 + 2)) == 1
end
test "min sint64" do
assert -9_223_372_036_854_775_808 == Wire.decode(:sint64, 18_446_744_073_709_551_615)
end
test "max sint64" do
assert 9_223_372_036_854_775_807 == Wire.decode(:sint64, 18_446_744_073_709_551_614)
end
test "max uint32" do
assert 4_294_967_295 == Wire.decode(:uint32, 4_294_967_295)
end
test "max uint64" do
assert 9_223_372_036_854_775_807 == Wire.decode(:uint64, 9_223_372_036_854_775_807)
end
test "bool works" do
assert true == Wire.decode(:bool, 1)
assert false == Wire.decode(:bool, 0)
end
test "enum known and unknown integer" do
assert :A == Wire.decode({:enum, TestMsg.EnumFoo}, 1)
assert 5 == Wire.decode({:enum, TestMsg.EnumFoo}, 5)
end
test "enum wraps to an int32" do
max_int32 = 0x7FFFFFFFFFFFFFFF
assert Wire.decode({:enum, TestMsg.EnumFoo}, max_int32 + 2) == :A
end
test "a fixed64" do
assert 8_446_744_073_709_551_615 ==
Wire.decode(:fixed64, <<255, 255, 23, 118, 251, 220, 56, 117>>)
end
test "max fixed64" do
assert 18_446_744_073_709_551_615 ==
Wire.decode(:fixed64, <<255, 255, 255, 255, 255, 255, 255, 255>>)
end
test "min sfixed64" do
assert -9_223_372_036_854_775_808 == Wire.decode(:sfixed64, <<0, 0, 0, 0, 0, 0, 0, 128>>)
end
test "max sfixed64" do
assert 9_223_372_036_854_775_807 ==
Wire.decode(:sfixed64, <<255, 255, 255, 255, 255, 255, 255, 127>>)
end
test "min double" do
assert 5.0e-324 == Wire.decode(:double, <<1, 0, 0, 0, 0, 0, 0, 0>>)
end
test "max double" do
assert 1.7976931348623157e308 ==
Wire.decode(:double, <<255, 255, 255, 255, 255, 255, 239, 127>>)
end
test "string" do
assert "testing" == Wire.decode(:string, <<116, 101, 115, 116, 105, 110, 103>>)
end
test "bytes" do
assert <<42, 43, 44, 45>> == Wire.decode(:bytes, <<42, 43, 44, 45>>)
end
test "fixed32" do
assert 4_294_967_295 == Wire.decode(:fixed32, <<255, 255, 255, 255>>)
end
test "sfixed32" do
assert 2_147_483_647 == Wire.decode(:sfixed32, <<255, 255, 255, 127>>)
end
test "float" do
assert 3.4028234663852886e38 == Wire.decode(:float, <<255, 255, 127, 127>>)
end
test "float infinity, -infinity, nan" do
assert :infinity == Wire.decode(:float, <<0, 0, 128, 127>>)
assert :negative_infinity == Wire.decode(:float, <<0, 0, 128, 255>>)
assert :nan == Wire.decode(:float, <<0, 0, 192, 127>>)
end
test "double infinity, -infinity, nan" do
assert :infinity == Wire.decode(:double, <<0, 0, 0, 0, 0, 0, 240, 127>>)
assert :negative_infinity == Wire.decode(:double, <<0, 0, 0, 0, 0, 0, 240, 255>>)
assert :nan == Wire.decode(:double, <<1, 0, 0, 0, 0, 0, 248, 127>>)
end
test "mismatching fixed-length sizes" do
msg = "can't decode <<0, 0, 0>> into type :fixed32"
assert_raise Protobuf.DecodeError, msg, fn ->
Wire.decode(:fixed32, <<0, 0, 0>>)
end
msg = "can't decode <<0, 0, 0, 0, 0>> into type :fixed32"
assert_raise Protobuf.DecodeError, msg, fn ->
Wire.decode(:fixed32, <<0, 0, 0, 0, 0>>)
end
msg = "can't decode <<0, 0, 0, 0, 0, 0, 0>> into type :fixed64"
assert_raise Protobuf.DecodeError, msg, fn ->
Wire.decode(:fixed64, <<0, 0, 0, 0, 0, 0, 0>>)
end
msg = "can't decode <<0, 0, 0, 0, 0, 0, 0, 0, 0>> into type :fixed64"
assert_raise Protobuf.DecodeError, msg, fn ->
Wire.decode(:fixed64, <<0, 0, 0, 0, 0, 0, 0, 0, 0>>)
end
end
test "Protobuf.DecodeError when unknown or mismatched type" do
msg = "can't decode <<0>> into type {:enum, \"invalid\"}"
assert_raise Protobuf.DecodeError, msg, fn ->
Wire.decode({:enum, "invalid"}, <<0>>)
end
end
end
end
| 27.624079 | 98 | 0.597883 |
ff61a9a25e7fe8cdf8e0934c63e5da4fee72461b | 349 | exs | Elixir | config/config.exs | florinpatrascu/callisto_demo | 4d0a7c2d491233a18c075eaa4fc3fa37b581334f | [
"Apache-2.0"
] | null | null | null | config/config.exs | florinpatrascu/callisto_demo | 4d0a7c2d491233a18c075eaa4fc3fa37b581334f | [
"Apache-2.0"
] | null | null | null | config/config.exs | florinpatrascu/callisto_demo | 4d0a7c2d491233a18c075eaa4fc3fa37b581334f | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :callisto_demo, CallistoDemo.Graph,
adapter: Callisto.Adapters.Neo4j,
url: "http://localhost:7474",
pool_size: 5,
max_overflow: 2,
timeout: 30
if Mix.env == :test, do: import_config "test.exs"
| 26.846154 | 61 | 0.744986 |
ff61d2e2975b3ebf84d51ecc262952e67a6085c2 | 10,581 | ex | Elixir | clients/ad_sense/lib/google_api/ad_sense/v14/api/reports.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense/lib/google_api/ad_sense/v14/api/reports.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense/lib/google_api/ad_sense/v14/api/reports.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdSense.V14.Api.Reports do
@moduledoc """
API calls for all endpoints tagged `Reports`.
"""
alias GoogleApi.AdSense.V14.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Generate an AdSense report based on the report request sent in the query parameters. Returns the result as JSON; to retrieve output in CSV format specify "alt=csv" as a query parameter.
## Parameters
* `connection` (*type:* `GoogleApi.AdSense.V14.Connection.t`) - Connection to server
* `start_date` (*type:* `String.t`) - Start of the date range to report on in "YYYY-MM-DD" format, inclusive.
* `end_date` (*type:* `String.t`) - End of the date range to report on in "YYYY-MM-DD" format, inclusive.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:accountId` (*type:* `list(String.t)`) - Accounts upon which to report.
* `:currency` (*type:* `String.t`) - Optional currency to use when reporting on monetary metrics. Defaults to the account's currency if not set.
* `:dimension` (*type:* `list(String.t)`) - Dimensions to base the report on.
* `:filter` (*type:* `list(String.t)`) - Filters to be run on the report.
* `:locale` (*type:* `String.t`) - Optional locale to use for translating report output to a local language. Defaults to "en_US" if not specified.
* `:maxResults` (*type:* `integer()`) - The maximum number of rows of report data to return.
* `:metric` (*type:* `list(String.t)`) - Numeric columns to include in the report.
* `:sort` (*type:* `list(String.t)`) - The name of a dimension or metric to sort the resulting report on, optionally prefixed with "+" to sort ascending or "-" to sort descending. If no prefix is specified, the column is sorted ascending.
* `:startIndex` (*type:* `integer()`) - Index of the first row of report data to return.
* `:useTimezoneReporting` (*type:* `boolean()`) - Whether the report should be generated in the AdSense account's local timezone. If false default PST/PDT timezone will be used.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse{}}` on success
* `{:error, info}` on failure
"""
@spec adsense_reports_generate(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def adsense_reports_generate(
connection,
start_date,
end_date,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:accountId => :query,
:currency => :query,
:dimension => :query,
:filter => :query,
:locale => :query,
:maxResults => :query,
:metric => :query,
:sort => :query,
:startIndex => :query,
:useTimezoneReporting => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/reports", %{})
|> Request.add_param(:query, :startDate, start_date)
|> Request.add_param(:query, :endDate, end_date)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse{}]
)
end
@doc """
Generate an AdSense report based on the saved report ID sent in the query parameters.
## Parameters
* `connection` (*type:* `GoogleApi.AdSense.V14.Connection.t`) - Connection to server
* `saved_report_id` (*type:* `String.t`) - The saved report to retrieve.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:locale` (*type:* `String.t`) - Optional locale to use for translating report output to a local language. Defaults to "en_US" if not specified.
* `:maxResults` (*type:* `integer()`) - The maximum number of rows of report data to return.
* `:startIndex` (*type:* `integer()`) - Index of the first row of report data to return.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse{}}` on success
* `{:error, info}` on failure
"""
@spec adsense_reports_saved_generate(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def adsense_reports_saved_generate(
connection,
saved_report_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:locale => :query,
:maxResults => :query,
:startIndex => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/reports/{savedReportId}", %{
"savedReportId" => URI.encode(saved_report_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.AdSense.V14.Model.AdsenseReportsGenerateResponse{}]
)
end
@doc """
List all saved reports in this AdSense account.
## Parameters
* `connection` (*type:* `GoogleApi.AdSense.V14.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:maxResults` (*type:* `integer()`) - The maximum number of saved reports to include in the response, used for paging.
* `:pageToken` (*type:* `String.t`) - A continuation token, used to page through saved reports. To retrieve the next page, set this parameter to the value of "nextPageToken" from the previous response.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdSense.V14.Model.SavedReports{}}` on success
* `{:error, info}` on failure
"""
@spec adsense_reports_saved_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.AdSense.V14.Model.SavedReports.t()}
| {:ok, Tesla.Env.t()}
| {:error, Tesla.Env.t()}
def adsense_reports_saved_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:maxResults => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/reports/saved", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.AdSense.V14.Model.SavedReports{}])
end
end
| 47.236607 | 246 | 0.641244 |
ff61eaaccf5d46b266ceed3604dfc5a1ca91b425 | 4,980 | exs | Elixir | test/prepay_and_use_web/controllers/user_settings_controller_test.exs | manojsamanta/stripe-prepay-and-use | 511b33cd93d619052fe0f6ecbf4374474f6a96e6 | [
"MIT"
] | null | null | null | test/prepay_and_use_web/controllers/user_settings_controller_test.exs | manojsamanta/stripe-prepay-and-use | 511b33cd93d619052fe0f6ecbf4374474f6a96e6 | [
"MIT"
] | null | null | null | test/prepay_and_use_web/controllers/user_settings_controller_test.exs | manojsamanta/stripe-prepay-and-use | 511b33cd93d619052fe0f6ecbf4374474f6a96e6 | [
"MIT"
] | null | null | null | defmodule PrepayAndUseWeb.UserSettingsControllerTest do
use PrepayAndUseWeb.ConnCase, async: true
alias PrepayAndUse.Accounts
import PrepayAndUse.AccountsFixtures
setup :register_and_log_in_user
describe "GET /users/settings" do
test "renders settings page", %{conn: conn} do
conn = get(conn, Routes.user_settings_path(conn, :edit))
response = html_response(conn, 200)
assert response =~ "<h1>Settings</h1>"
end
test "redirects if user is not logged in" do
conn = build_conn()
conn = get(conn, Routes.user_settings_path(conn, :edit))
assert redirected_to(conn) == Routes.user_session_path(conn, :new)
end
end
describe "PUT /users/settings (change password form)" do
test "updates the user password and resets tokens", %{conn: conn, user: user} do
new_password_conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_password",
"current_password" => valid_user_password(),
"user" => %{
"password" => "new valid password",
"password_confirmation" => "new valid password"
}
})
assert redirected_to(new_password_conn) == Routes.user_settings_path(conn, :edit)
assert get_session(new_password_conn, :user_token) != get_session(conn, :user_token)
assert get_flash(new_password_conn, :info) =~ "Password updated successfully"
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "does not update password on invalid data", %{conn: conn} do
old_password_conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_password",
"current_password" => "invalid",
"user" => %{
"password" => "too short",
"password_confirmation" => "does not match"
}
})
response = html_response(old_password_conn, 200)
assert response =~ "<h1>Settings</h1>"
assert response =~ "should be at least 12 character(s)"
assert response =~ "does not match password"
assert response =~ "is not valid"
assert get_session(old_password_conn, :user_token) == get_session(conn, :user_token)
end
end
describe "PUT /users/settings (change email form)" do
@tag :capture_log
test "updates the user email", %{conn: conn, user: user} do
conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_email",
"current_password" => valid_user_password(),
"user" => %{"email" => unique_user_email()}
})
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :info) =~ "A link to confirm your email"
assert Accounts.get_user_by_email(user.email)
end
test "does not update email on invalid data", %{conn: conn} do
conn =
put(conn, Routes.user_settings_path(conn, :update), %{
"action" => "update_email",
"current_password" => "invalid",
"user" => %{"email" => "with spaces"}
})
response = html_response(conn, 200)
assert response =~ "<h1>Settings</h1>"
assert response =~ "must have the @ sign and no spaces"
assert response =~ "is not valid"
end
end
describe "GET /users/settings/confirm_email/:token" do
setup %{user: user} do
email = unique_user_email()
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
end)
%{token: token, email: email}
end
test "updates the user email once", %{conn: conn, user: user, token: token, email: email} do
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :info) =~ "Email changed successfully"
refute Accounts.get_user_by_email(user.email)
assert Accounts.get_user_by_email(email)
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
end
test "does not update email with invalid token", %{conn: conn, user: user} do
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, "oops"))
assert redirected_to(conn) == Routes.user_settings_path(conn, :edit)
assert get_flash(conn, :error) =~ "Email change link is invalid or it has expired"
assert Accounts.get_user_by_email(user.email)
end
test "redirects if user is not logged in", %{token: token} do
conn = build_conn()
conn = get(conn, Routes.user_settings_path(conn, :confirm_email, token))
assert redirected_to(conn) == Routes.user_session_path(conn, :new)
end
end
end
| 38.307692 | 96 | 0.652209 |
ff620350fbc2b00a8cb1bcdcfdb4bdd9979aba5e | 4,129 | ex | Elixir | apps/language_server/lib/language_server/providers/code_lens/type_spec.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | apps/language_server/lib/language_server/providers/code_lens/type_spec.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | apps/language_server/lib/language_server/providers/code_lens/type_spec.ex | SerenityIK/elixir-ls | 9569197be87809c241360a6ef1f5a9fffd25ab74 | [
"Apache-2.0"
] | null | null | null | defmodule ElixirLS.LanguageServer.Providers.CodeLens.TypeSpec do
@moduledoc """
Collects the success typings inferred by Dialyzer, translates the syntax to Elixir, and shows them
inline in the editor as @spec suggestions.
The server, unfortunately, has no way to force the client to refresh the @spec code lenses when new
success typings, so we let this request block until we know we have up-to-date results from
Dialyzer. We rely on the client being able to await this result while still making other requests
in parallel. If the client is unable to perform requests in parallel, the client or user should
disable this feature.
"""
alias ElixirLS.LanguageServer.Providers.CodeLens
alias ElixirLS.LanguageServer.{Server, SourceFile}
alias Erl2ex.Convert.{Context, ErlForms}
alias Erl2ex.Pipeline.{Parse, ModuleData, ExSpec}
defmodule ContractTranslator do
def translate_contract(fun, contract, is_macro) do
# FIXME: Private module
{[%ExSpec{specs: [spec]} | _], _} =
"-spec foo#{contract}."
# FIXME: Private module
|> Parse.string()
|> hd()
|> elem(0)
# FIXME: Private module
|> ErlForms.conv_form(%Context{
in_type_expr: true,
# FIXME: Private module
module_data: %ModuleData{}
})
spec
|> Macro.postwalk(&tweak_specs/1)
|> drop_macro_env(is_macro)
|> Macro.to_string()
|> String.replace("()", "")
|> Code.format_string!(line_length: :infinity)
|> IO.iodata_to_binary()
|> String.replace_prefix("foo", to_string(fun))
end
defp tweak_specs({:list, _meta, args}) do
case args do
[{:{}, _, [{:atom, _, []}, {wild, _, _}]}] when wild in [:_, :any] -> quote do: keyword()
list -> list
end
end
defp tweak_specs({:nonempty_list, _meta, args}) do
case args do
[{:any, _, []}] -> quote do: [...]
_ -> args ++ quote do: [...]
end
end
defp tweak_specs({:%{}, _meta, fields}) do
fields =
Enum.map(fields, fn
{:map_field_exact, _, [key, value]} -> {key, value}
{key, value} -> quote do: {optional(unquote(key)), unquote(value)}
field -> field
end)
|> Enum.reject(&match?({{:optional, _, [{:any, _, []}]}, {:any, _, []}}, &1))
fields
|> Enum.find_value(fn
{:__struct__, struct_type} when is_atom(struct_type) -> struct_type
_ -> nil
end)
|> case do
nil -> {:%{}, [], fields}
struct_type -> {{:., [], [struct_type, :t]}, [], []}
end
end
# Undo conversion of _ to any() when inside binary spec
defp tweak_specs({:<<>>, _, children}) do
children =
Macro.postwalk(children, fn
{:any, _, []} -> quote do: _
other -> other
end)
{:<<>>, [], children}
end
defp tweak_specs({:_, _, _}) do
quote do: any()
end
defp tweak_specs({:when, [], [spec, substitutions]}) do
substitutions = Enum.reject(substitutions, &match?({:_, {:any, _, []}}, &1))
case substitutions do
[] -> spec
_ -> {:when, [], [spec, substitutions]}
end
end
defp tweak_specs(node) do
node
end
defp drop_macro_env(ast, false), do: ast
defp drop_macro_env({:"::", [], [{:foo, [], [_env | rest]}, res]}, true) do
{:"::", [], [{:foo, [], rest}, res]}
end
end
def code_lens(server_instance_id, uri, text) do
resp =
for {_, line, {mod, fun, arity}, contract, is_macro} <- Server.suggest_contracts(uri),
SourceFile.function_def_on_line?(text, line, fun),
spec = ContractTranslator.translate_contract(fun, contract, is_macro) do
CodeLens.build_code_lens(
line,
"@spec #{spec}",
"spec:#{server_instance_id}",
%{
"uri" => uri,
"mod" => to_string(mod),
"fun" => to_string(fun),
"arity" => arity,
"spec" => spec,
"line" => line
}
)
end
{:ok, resp}
end
end
| 30.360294 | 101 | 0.562364 |
ff6206e936d15253ab12fad16d4d7bfcb48673d5 | 11,728 | ex | Elixir | clients/digital_asset_links/lib/google_api/digital_asset_links/v1/api/assetlinks.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/digital_asset_links/lib/google_api/digital_asset_links/v1/api/assetlinks.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/digital_asset_links/lib/google_api/digital_asset_links/v1/api/assetlinks.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DigitalAssetLinks.V1.Api.Assetlinks do
  @moduledoc """
  API calls for all endpoints tagged `Assetlinks`.
  """

  alias GoogleApi.DigitalAssetLinks.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  # Every optional parameter of `assetlinks.check` is sent on the query string.
  @check_query_params [
    :"$.xgafv",
    :access_token,
    :alt,
    :callback,
    :fields,
    :key,
    :oauth_token,
    :prettyPrint,
    :quotaUser,
    :uploadType,
    :upload_protocol,
    :relation,
    :"source.androidApp.certificate.sha256Fingerprint",
    :"source.androidApp.packageName",
    :"source.web.site",
    :"target.androidApp.certificate.sha256Fingerprint",
    :"target.androidApp.packageName",
    :"target.web.site"
  ]

  @doc """
  Determines whether the specified (directional) relationship exists between
  the specified source and target assets.

  The relation describes the intent of the link between the two assets as
  claimed by the source asset (for example, delegation of privileges or
  permissions). It is given via the `:relation` option as a
  `<kind>/<detail>` string; for a query to match an asset link, both
  relation strings must match exactly.

  Source and target assets are identified either as web sites
  (`:"source.web.site"` / `:"target.web.site"`: a URL made of scheme,
  fully-qualified hostname ending in a period, and optional non-default
  port) or as Android apps (`:"*.androidApp.packageName"` plus the
  uppercase, colon-separated SHA-256 certificate fingerprint in
  `:"*.androidApp.certificate.sha256Fingerprint"`).

  If the source asset is secure (an HTTPS website or an Android app), the
  API ensures the statements used to generate the response were made
  securely by the asset's owner; statements from insecure HTTP sites cannot
  be verified. See the [Digital Asset Links technical design
  specification](https://github.com/google/digitalassetlinks/blob/master/well-known/details.md).

  ## Parameters

  *   `connection` (*type:* `GoogleApi.DigitalAssetLinks.V1.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters, all sent
      as query string values. Includes the standard Google API options
      (`:key`, `:oauth_token`, `:fields`, `:alt`, `:quotaUser`, ...) as well
      as the relation/source/target options described above.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.DigitalAssetLinks.V1.Model.CheckResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec digitalassetlinks_assetlinks_check(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.DigitalAssetLinks.V1.Model.CheckResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def digitalassetlinks_assetlinks_check(connection, optional_params \\ [], opts \\ []) do
    # Map each supported option to its location (:query) for the Gax request
    # builder, which drops any option not present in this config.
    optional_params_config = Map.new(@check_query_params, fn key -> {key, :query} end)

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/assetlinks:check", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    decode_opts = opts ++ [struct: %GoogleApi.DigitalAssetLinks.V1.Model.CheckResponse{}]

    connection
    |> Connection.execute(request)
    |> Response.decode(decode_opts)
  end
end
| 47.869388 | 196 | 0.638984 |
ff6229932de5bdedc172fcc7ec4fa4a3272cd86b | 3,646 | ex | Elixir | lib/river/encoder.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 86 | 2016-08-19T21:59:28.000Z | 2022-01-31T20:14:18.000Z | lib/river/encoder.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 7 | 2016-09-27T14:44:16.000Z | 2017-08-08T14:57:45.000Z | lib/river/encoder.ex | peburrows/river | e8968535d02a86e70a7942a690c8e461fed55913 | [
"MIT"
] | 4 | 2016-09-26T10:57:24.000Z | 2018-04-03T14:30:19.000Z | defmodule River.Encoder do
require River.FrameTypes
alias River.{Frame, FrameTypes}
alias River.Frame.Settings
def encode(%Frame{} = frame, ctx \\ nil) do
frame
|> payload(ctx)
|> header
|> compile
end
defp compile(%Frame{__header: head, __payload: body}) do
head <> body
end
defp payload(
%{type: FrameTypes.continuation(), payload: %{header_block_fragment: fragment}} = frame,
_ctx
) do
%{frame | __payload: fragment, length: byte_size(fragment)}
end
defp payload(%{type: FrameTypes.data(), payload: %{data: data}} = frame, _ctx) do
%{frame | __payload: data}
|> padded_payload
|> put_length
end
defp payload(
%{
type: FrameTypes.goaway(),
payload: %{error: err, last_stream_id: last_sid, debug: debug}
} = frame,
_ctx
) do
err = River.Errors.error_to_code(err)
body =
case debug do
nil -> <<1::1, last_sid::31, err::32>>
_ -> <<1::1, last_sid::31, err::32, debug::binary>>
end
%{frame | __payload: body, length: byte_size(body)}
end
defp payload(
%{type: FrameTypes.headers(), payload: %{header_block_fragment: fragment}} = frame,
_ctx
) do
%{frame | __payload: fragment}
|> weighted_payload
|> padded_payload
|> put_length
end
defp payload(%{type: FrameTypes.ping()} = frame, _ctx) do
%{frame | __payload: :binary.copy(<<0>>, 8), length: 8}
end
defp payload(
%{
type: FrameTypes.push_promise(),
payload: %{headers: headers, promised_stream_id: prom_id}
} = frame,
ctx
) do
%{frame | __payload: <<1::1, prom_id::31>> <> HPack.encode(headers, ctx)}
|> padded_payload
|> put_length
end
defp payload(
%{
type: FrameTypes.priority(),
payload: %{stream_dependency: dep, weight: w, exclusive: ex}
} = frame,
_ctx
) do
ex = if ex, do: 1, else: 0
w = w - 1
%{frame | __payload: <<ex::1, dep::31, w::8>>, length: 5}
end
defp payload(%{type: FrameTypes.rst_stream(), payload: %{error: err}} = frame, _ctx) do
%{frame | __payload: <<River.Errors.error_to_code(err)::32>>, length: 4}
end
defp payload(%{type: FrameTypes.settings(), payload: %{settings: settings}} = frame, _ctx) do
data = Enum.map_join(settings, fn {k, v} -> <<Settings.setting(k)::16, v::32>> end)
%{frame | __payload: data, length: byte_size(data), stream_id: 0}
end
defp payload(
%{type: FrameTypes.window_update(), stream_id: stream, payload: %{increment: inc}} =
frame,
_ctx
) do
%{frame | __payload: <<1::1, inc::31>>, length: 4, stream_id: stream}
end
defp header(%{type: type, stream_id: stream_id, flags: flags, length: len} = frame) do
%{frame | __header: <<len::24, type::8, River.Flags.encode(flags)::8, 1::1, stream_id::31>>}
end
defp weighted_payload(%{payload: %{weight: nil}} = frame), do: frame
defp weighted_payload(
%{__payload: payload, payload: %{weight: w, stream_dependency: dep, exclusive: ex}} =
frame
) do
ex = if ex, do: 1, else: 0
w = w - 1
%{frame | __payload: <<ex::1, dep::31, w::8>> <> payload}
end
defp padded_payload(%{payload: %{padding: 0}} = frame), do: frame
defp padded_payload(%{__payload: payload, payload: %{padding: pl}} = frame) do
%{frame | __payload: <<pl::8>> <> payload <> :crypto.strong_rand_bytes(pl)}
end
defp put_length(%{__payload: payload} = frame) do
%{frame | length: byte_size(payload)}
end
end
| 28.263566 | 97 | 0.585573 |
ff624ff6b3802968c86bbfeaea0620f49e09d511 | 1,200 | ex | Elixir | backend/lib/caffe/support/middleware/validator.ex | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | 7 | 2020-03-27T08:26:52.000Z | 2021-08-29T09:50:31.000Z | backend/lib/caffe/support/middleware/validator.ex | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | null | null | null | backend/lib/caffe/support/middleware/validator.ex | eeng/caffe | d85d0dd56a8204c715052ddaf3d990e47c5df0e9 | [
"MIT"
] | null | null | null | defmodule Caffe.Middleware.Validator do
@behaviour Commanded.Middleware
alias Commanded.Middleware.Pipeline
import Pipeline
def before_dispatch(%Pipeline{command: command} = pipeline) do
changeset = build_changeset(command)
case Ecto.Changeset.apply_action(changeset, :insert) do
{:ok, command} ->
# Replace the command as the new one may have fields casted, defaults assigned, etc.
pipeline |> Map.put(:command, command)
{:error, changeset} ->
pipeline
|> respond({:error, {:invalid_command, transform_errors(changeset)}})
|> halt()
end
end
def after_dispatch(%Pipeline{} = pipeline) do
pipeline
end
def after_failure(%Pipeline{} = pipeline) do
pipeline
end
# All commands must implement a changeset/2 function for this to work
defp build_changeset(command) do
command.__struct__.changeset(struct(command.__struct__), Map.from_struct(command))
end
def transform_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {msg, opts} ->
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 27.906977 | 92 | 0.685 |
ff62600c00f99ca733e648589bde975759462cf7 | 373 | exs | Elixir | example of using ExCrypto.exs | pmarreck/elixir-snippets | 5f5ee26087bc2ded4e71c4c3eeff1231310ff358 | [
"BSD-3-Clause"
] | 34 | 2015-02-27T14:41:12.000Z | 2021-09-26T06:06:18.000Z | example of using ExCrypto.exs | pmarreck/elixir-snippets | 5f5ee26087bc2ded4e71c4c3eeff1231310ff358 | [
"BSD-3-Clause"
] | null | null | null | example of using ExCrypto.exs | pmarreck/elixir-snippets | 5f5ee26087bc2ded4e71c4c3eeff1231310ff358 | [
"BSD-3-Clause"
] | 3 | 2016-02-05T16:09:41.000Z | 2017-10-21T15:47:04.000Z | arbitrary_data = %{id: 6, name: "test"}
clear_text = :erlang.term_to_binary(arbitrary_data)
{:ok, aes_256_key} = ExCrypto.generate_aes_key(:aes_256, :bytes)
{:ok, {init_vec, cipher_text}} = ExCrypto.encrypt(aes_256_key, clear_text)
16 = String.length(init_vec)
{:ok, val} = ExCrypto.decrypt(aes_256_key, init_vec, cipher_text)
val = clear_text
:erlang.binary_to_term(val)
| 37.3 | 74 | 0.758713 |
ff62615a6eba0528dcc3494f4d42aaba0c97fd17 | 1,130 | ex | Elixir | test/support/channel_case.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | 3 | 2021-11-19T07:18:30.000Z | 2021-12-23T22:19:48.000Z | test/support/channel_case.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | null | null | null | test/support/channel_case.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | null | null | null | defmodule HonestChatWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use HonestChatWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import HonestChatWeb.ChannelCase
# The default endpoint for testing
@endpoint HonestChatWeb.Endpoint
end
end
setup tags do
pid = Sandbox.start_owner!(HonestChat.Repo, shared: not tags[:async])
on_exit(fn -> Sandbox.stop_owner(pid) end)
:ok
end
end
| 28.974359 | 73 | 0.727434 |
ff627149f37684a55ed4365bcce55f10399feb4d | 766 | exs | Elixir | test/phoenix/transports/websocket_serializer_test.exs | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | 1 | 2019-06-11T20:22:21.000Z | 2019-06-11T20:22:21.000Z | test/phoenix/transports/websocket_serializer_test.exs | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | null | null | null | test/phoenix/transports/websocket_serializer_test.exs | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | 2 | 2020-08-02T04:00:17.000Z | 2020-10-07T16:07:37.000Z | defmodule Phoenix.Tranports.WebSocketSerializerTest do
use ExUnit.Case, async: true
alias Phoenix.Transports.WebSocketSerializer
alias Phoenix.Socket.Message
@msg_json [123, [[34, ["topic"], 34], 58, [34, ["t"], 34], 44, [34, ["ref"], 34], 58, "null", 44, [34, ["payload"], 34], 58, [34, ["m"], 34], 44, [34, ["event"], 34], 58, [34, ["e"], 34]], 125]
test "encode!/1 encodes `Phoenix.Socket.Message` as JSON" do
msg = %Message{topic: "t", event: "e", payload: "m"}
assert WebSocketSerializer.encode!(msg) == {:socket_push, :text, @msg_json}
end
test "decode!/2 decodes `Phoenix.Socket.Message` from JSON" do
assert %Message{topic: "t", event: "e", payload: "m"} ==
WebSocketSerializer.decode!(@msg_json, opcode: :text)
end
end
| 40.315789 | 195 | 0.63577 |
ff6293da4a0b93bed3b53698bb6cfd0bf7058dfb | 882 | ex | Elixir | lib/decoding/export_section_parser.ex | ElixiumNetwork/WaspVM | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 112 | 2018-12-14T23:43:38.000Z | 2019-03-22T22:02:27.000Z | lib/decoding/export_section_parser.ex | ElixiumNetwork/Elixium-Wasm | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 21 | 2018-12-22T23:53:14.000Z | 2019-02-21T00:18:05.000Z | lib/decoding/export_section_parser.ex | ElixiumNetwork/Elixium-Wasm | 7a313825ed50720256dda6e61e83356621357963 | [
"MIT"
] | 4 | 2019-05-11T16:37:19.000Z | 2021-05-18T13:45:06.000Z | defmodule AlchemyVM.Decoder.ExportSectionParser do
alias AlchemyVM.LEB128
alias AlchemyVM.OpCodes
require IEx
@moduledoc false
def parse(section) do
{count, entries} = LEB128.decode_unsigned(section)
entries = if count > 0, do: parse_entries(entries), else: []
entries = entries |> Enum.reject(&(&1 == nil))
{:exports, Enum.reverse(entries)}
end
defp parse_entries(entries), do: parse_entries([], entries)
defp parse_entries(parsed, <<>>), do: parsed
defp parse_entries(parsed, entries) do
{field_len, entries} = LEB128.decode_unsigned(entries)
<<field_str::bytes-size(field_len), kind, entries::binary>> = entries
{index, entries} = LEB128.decode_unsigned(entries)
entry = %{
name: field_str,
kind: OpCodes.external_kind(kind),
index: index
}
parse_entries([entry | parsed], entries)
end
end
| 23.837838 | 73 | 0.68254 |
ff62b1e3c75897a8f45595131dd6cfbd56d9527a | 1,249 | ex | Elixir | lib/elixir_lokalise_api/config.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | 1 | 2021-09-27T09:48:29.000Z | 2021-09-27T09:48:29.000Z | lib/elixir_lokalise_api/config.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | null | null | null | lib/elixir_lokalise_api/config.ex | kianmeng/elixir-lokalise-api | fca59438cbd8ee960adbbce8b13cef12ddc68ef6 | [
"BSD-3-Clause"
] | null | null | null | defmodule ElixirLokaliseApi.Config do
@moduledoc """
Stores configuration variables used to communicate with Lokalise APIv2.
All settings also accept `{:system, "ENV_VAR_NAME"}` to read their
values from environment variables at runtime.
"""
@doc """
Returns Lokalise APIv2 token. Set it inside your `mix.exs`:
config :elixir_lokalise_api, api_token: "YOUR_API_TOKEN"
"""
def api_token, do: from_env(:api_token)
def request_options, do: from_env(:request_options, Keyword.new())
@doc """
Returns package version
"""
def version, do: from_env(:version, "1.0.0")
@doc """
Returns the base URL of the Lokalise APIv2
"""
def base_url, do: "https://api.lokalise.com/api2/"
@doc """
A light wrapper around `Application.get_env/2`, providing automatic support for
`{:system, "VAR"}` tuples. Based on https://github.com/danielberkompas/ex_twilio/blob/master/lib/ex_twilio/config.ex
"""
def from_env(key, default \\ nil)
def from_env(key, default) do
:elixir_lokalise_api
|> Application.get_env(key, default)
|> read_from_system(default)
end
defp read_from_system({:system, env}, default), do: System.get_env(env) || default
defp read_from_system(value, _default), do: value
end
| 30.463415 | 118 | 0.709367 |
ff62c94e8f4b4476da74b9fa17f06410f0e3764e | 1,116 | exs | Elixir | implements/pascals-triangle/pascals_triangle_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | implements/pascals-triangle/pascals_triangle_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | 1 | 2018-06-19T18:59:41.000Z | 2018-06-19T18:59:41.000Z | implements/pascals-triangle/pascals_triangle_test.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | #if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("pascals_triangle.exs", __DIR__)
#end
ExUnit.start()
#ExUnit.configure(exclude: :pending, trace: true)
defmodule PascalsTriangleTest do
use ExUnit.Case
# @tag pending
test "one row" do
assert PascalsTriangle.rows(1) == [[1]]
end
@tag :pending
test "two rows" do
assert PascalsTriangle.rows(2) == [[1], [1, 1]]
end
@tag :pending
test "three rows" do
assert PascalsTriangle.rows(3) == [[1], [1, 1], [1, 2, 1]]
end
@tag :pending
test "fourth row" do
assert List.last(PascalsTriangle.rows(4)) == [1, 3, 3, 1]
end
@tag :pending
test "fifth row" do
assert List.last(PascalsTriangle.rows(5)) == [1, 4, 6, 4, 1]
end
@tag :pending
test "twentieth row" do
expected = [
1,
19,
171,
969,
3876,
11_628,
27_132,
50_388,
75_582,
92_378,
92_378,
75_582,
50_388,
27_132,
11_628,
3876,
969,
171,
19,
1
]
assert List.last(PascalsTriangle.rows(20)) == expected
end
end
| 17.4375 | 64 | 0.576165 |
ff62d6cc7d22db197f4ce1ec4d28134d96c76381 | 127 | ex | Elixir | lib/exkonsument/exchange.ex | ivx/ExKonsument | cce7f4f5a78c5d2910358e34649cdc70b299618d | [
"MIT"
] | 3 | 2016-08-22T12:27:09.000Z | 2021-03-17T19:24:43.000Z | lib/exkonsument/exchange.ex | ivx/ExKonsument | cce7f4f5a78c5d2910358e34649cdc70b299618d | [
"MIT"
] | 50 | 2018-05-24T08:10:43.000Z | 2021-06-28T11:59:24.000Z | lib/exkonsument/exchange.ex | ivx/ExKonsument | cce7f4f5a78c5d2910358e34649cdc70b299618d | [
"MIT"
] | null | null | null | defmodule ExKonsument.Exchange do
@moduledoc false
defstruct name: nil,
type: nil,
options: []
end
| 18.142857 | 33 | 0.606299 |
ff62f8576d7919a7067670c4e58c6cfe2b732a03 | 118,321 | ex | Elixir | lib/ecto/changeset.ex | CrazyEggInc/ecto | 790616281a787a9f5715476c62d811b8b64beedd | [
"Apache-2.0"
] | null | null | null | lib/ecto/changeset.ex | CrazyEggInc/ecto | 790616281a787a9f5715476c62d811b8b64beedd | [
"Apache-2.0"
] | null | null | null | lib/ecto/changeset.ex | CrazyEggInc/ecto | 790616281a787a9f5715476c62d811b8b64beedd | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Changeset do
@moduledoc ~S"""
Changesets allow filtering, casting, validation and
definition of constraints when manipulating structs.
There is an example of working with changesets in the introductory
documentation in the `Ecto` module. The functions `cast/4` and
`change/2` are the usual entry points for creating changesets.
The first one is used to cast and validate external parameters,
such as parameters sent through a form, API, command line, etc.
The second one is used to change data directly from your application.
The remaining functions in this module, such as validations,
constraints, association handling, are about manipulating
changesets. Let's discuss some of this extra functionality.
## External vs internal data
Changesets allow working with both kinds of data:
* internal to the application - for example programmatically generated,
or coming from other subsystems. This use case is primarily covered
by the `change/2` and `put_change/3` functions.
* external to the application - for example data provided by the user in
a form that needs to be type-converted and properly validated. This
use case is primarily covered by the `cast/4` function.
## Validations and constraints
Ecto changesets provide both validations and constraints which
are ultimately turned into errors in case something goes wrong.
The difference between them is that most validations can be
executed without a need to interact with the database and, therefore,
are always executed before attempting to insert or update the entry
in the database. Some validations may happen against the database but
they are inherently unsafe. Those validations start with an `unsafe_`
prefix, such as `unsafe_validate_unique/3`.
On the other hand, constraints rely on the database and are always safe.
As a consequence, validations are always checked before constraints.
Constraints won't even be checked in case validations failed.
Let's see an example:
defmodule User do
use Ecto.Schema
import Ecto.Changeset
schema "users" do
field :name
field :email
field :age, :integer
end
def changeset(user, params \\ %{}) do
user
|> cast(params, [:name, :email, :age])
|> validate_required([:name, :email])
|> validate_format(:email, ~r/@/)
|> validate_inclusion(:age, 18..100)
|> unique_constraint(:email)
end
end
In the `changeset/2` function above, we define three validations.
They check that `name` and `email` fields are present in the
changeset, the e-mail is of the specified format, and the age is
between 18 and 100 - as well as a unique constraint in the email
field.
Let's suppose the e-mail is given but the age is invalid. The
changeset would have the following errors:
changeset = User.changeset(%User{}, %{age: 0, email: "mary@example.com"})
{:error, changeset} = Repo.insert(changeset)
changeset.errors #=> [age: {"is invalid", []}, name: {"can't be blank", []}]
In this case, we haven't checked the unique constraint in the
e-mail field because the data did not validate. Let's fix the
age and the name, and assume that the e-mail already exists in the
database:
changeset = User.changeset(%User{}, %{age: 42, name: "Mary", email: "mary@example.com"})
{:error, changeset} = Repo.insert(changeset)
changeset.errors #=> [email: {"has already been taken", []}]
Validations and constraints define an explicit boundary when the check
happens. By moving constraints to the database, we also provide a safe,
correct and data-race free means of checking the user input.
### Deferred constraints
Some databases support deferred constraints, i.e., constraints which are
checked at the end of the transaction rather than at the end of each statement.
Changesets do not support this type of constraints. When working with deferred
constraints, a violation while invoking `Repo.insert/2` or `Repo.update/2` won't
return `{:error, changeset}`, but rather raise an error at the end of the
transaction.
## Empty values
Many times, the data given on cast needs to be further pruned, specially
regarding empty values. For example, if you are gathering data to be
cast from the command line or through an HTML form or any other text-based
format, it is likely those means cannot express nil values. For
those reasons, changesets include the concept of empty values, which are
values that will be automatically converted to the field's default value
on `cast/4`. Those values are stored in the changeset `empty_values` field
and default to `[""]`. You can also pass the `:empty_values` option to
`cast/4` in case you want to change how a particular `cast/4` works.
## Associations, embeds and on replace
Using changesets you can work with associations as well as with embedded
structs. There are two primary APIs:
* `cast_assoc/3` and `cast_embed/3` - those functions are used when
working with external data. In particular, they allow you to change
associations and embeds alongside the parent struct, all at once.
* `put_assoc/4` and `put_embed/4` - it allows you to replace the
association or embed as a whole. This can be used to move associated
data from one entry to another, to completely remove or replace
existing entries.
See the documentation for those functions for more information.
### The `:on_replace` option
When using any of those APIs, you may run into situations where Ecto sees
data is being replaced. For example, imagine a Post has many Comments where
the comments have IDs 1, 2 and 3. If you call `cast_assoc/3` passing only
the IDs 1 and 2, Ecto will consider 3 is being "replaced" and it will raise
by default. Such behaviour can be changed when defining the relation by
setting `:on_replace` option when defining your association/embed according
to the values below:
* `:raise` (default) - do not allow removing association or embedded
data via parent changesets
* `:mark_as_invalid` - if attempting to remove the association or
embedded data via parent changeset - an error will be added to the parent
changeset, and it will be marked as invalid
* `:nilify` - sets owner reference column to `nil` (available only for
associations). Use this on a `belongs_to` column to allow the association
to be cleared out so that it can be set to a new value. Will set `action`
on associated changesets to `:replace`
* `:update` - updates the association, available only for `has_one`, `belongs_to`
and `embeds_one`. This option will update all the fields given to the changeset
including the id for the association
* `:delete` - removes the association or related data from the database.
This option has to be used carefully (see below). Will set `action` on associated
changesets to `:replace`
* `:delete_if_exists` - like `:delete` except that it ignores any stale entry
error. For instance, if you set `on_replace: :delete` but the replaced
resource was already deleted by a separate request, it will raise a
`Ecto.StaleEntryError`. `:delete_if_exists` makes it so it will only delete
if the entry still exists
The `:delete` and `:delete_if_exists` options must be used carefully as they allow
users to delete any associated data by simply not sending the associated data.
If you need deletion, it is often preferred to manually mark the changeset
for deletion if a `delete` field is set in the params, as in the example below:
defmodule Comment do
use Ecto.Schema
import Ecto.Changeset
schema "comments" do
field :body, :string
end
def changeset(comment, %{"delete" => "true"}) do
%{Ecto.Changeset.change(comment) | action: :delete}
end
def changeset(comment, params) do
cast(comment, params, [:body])
end
end
## Schemaless changesets
In the changeset examples so far, we have always used changesets to validate
and cast data contained in a struct defined by an Ecto schema, such as the `%User{}`
struct defined by the `User` module.
However, changesets can also be used with "regular" structs too by passing a tuple
with the data and its types:
user = %User{}
types = %{first_name: :string, last_name: :string, email: :string}
changeset =
{user, types}
|> Ecto.Changeset.cast(params, Map.keys(types))
|> Ecto.Changeset.validate_required(...)
|> Ecto.Changeset.validate_length(...)
where the user struct refers to the definition in the following module:
defmodule User do
defstruct [:name, :age]
end
Changesets can also be used with data in a plain map, by following the same API:
data = %{}
types = %{name: :string}
params = %{name: "Callum"}
changeset =
{data, types}
|> Ecto.Changeset.cast(params, Map.keys(types))
|> Ecto.Changeset.validate_required(...)
|> Ecto.Changeset.validate_length(...)
Such functionality makes Ecto extremely useful to cast, validate and prune data even
if it is not meant to be persisted to the database.
### Changeset actions
Changesets have an action field which is usually set by `Ecto.Repo`
whenever one of the operations such as `insert` or `update` is called:
changeset = User.changeset(%User{}, %{age: 42, email: "mary@example.com"})
{:error, changeset} = Repo.insert(changeset)
changeset.action
#=> :insert
This means that when working with changesets that are not meant to be
persisted to the database, such as schemaless changesets, you may need
to explicitly set the action to one specific value. Frameworks such as
Phoenix use the action value to define how HTML forms should act.
Instead of setting the action manually, you may use `apply_action/2` that
emulates operations such as `Repo.insert`. `apply_action/2` will return
`{:ok, changes}` if the changeset is valid or `{:error, changeset}`, with
the given `action` set in the changeset in case of errors.
## The Ecto.Changeset struct
The public fields are:
* `valid?` - Stores if the changeset is valid
* `data` - The changeset source data, for example, a struct
* `params` - The parameters as given on changeset creation
* `changes` - The `changes` from parameters that were approved in casting
* `errors` - All errors from validations
* `required` - All required fields as a list of atoms
* `action` - The action to be performed with the changeset
* `types` - Cache of the data's field types
* `empty_values` - A list of values to be considered empty
* `repo` - The repository applying the changeset (only set after a Repo function is called)
* `repo_opts` - A keyword list of options given to the underlying repository operation
The following fields are private and must not be accessed directly.
* `validations`
* `constraints`
* `filters`
* `prepare`
### Redacting fields in inspect
To hide a field's value from the inspect protocol of `Ecto.Changeset`, mark
the field as `redact: true` in the schema, and it will display with the
value `**redacted**`.
"""
require Ecto.Query
alias __MODULE__
alias Ecto.Changeset.Relation
  # Values treated as "empty" during cast/4 unless overridden via the
  # :empty_values option (see the "Empty values" section of the moduledoc).
  @empty_values [""]

  # If a new field is added here, def merge must be adapted
  defstruct valid?: false, data: nil, params: nil, changes: %{},
            errors: [], validations: [], required: [], prepare: [],
            constraints: [], filters: %{}, action: nil, types: nil,
            empty_values: @empty_values, repo: nil, repo_opts: []

  @type t(data_type) :: %Changeset{
          valid?: boolean(),
          repo: atom | nil,
          repo_opts: Keyword.t(),
          data: data_type,
          params: %{optional(String.t()) => term} | nil,
          changes: %{optional(atom) => term},
          required: [atom],
          prepare: [(t -> t)],
          errors: [{atom, error}],
          constraints: [constraint],
          validations: [{atom, term}],
          filters: %{optional(atom) => term},
          action: action,
          types: nil | %{atom => Ecto.Type.t() | {:assoc, term()} | {:embed, term()}}
        }

  @type t :: t(Ecto.Schema.t | map | nil)
  @type error :: {String.t, Keyword.t}
  @type action :: nil | :insert | :update | :delete | :replace | :ignore | atom
  @type constraint :: %{type: :check | :exclusion | :foreign_key | :unique,
                        constraint: String.t, match: :exact | :suffix | :prefix,
                        field: atom, error_message: String.t, error_type: atom}
  @type data :: map()
  @type types :: map()

  # Comparator functions paired with their error-message templates, keyed by
  # the number-validation option name.
  @number_validators %{
    less_than: {&</2, "must be less than %{number}"},
    greater_than: {&>/2, "must be greater than %{number}"},
    less_than_or_equal_to: {&<=/2, "must be less than or equal to %{number}"},
    greater_than_or_equal_to: {&>=/2, "must be greater than or equal to %{number}"},
    equal_to: {&==/2, "must be equal to %{number}"},
    not_equal_to: {&!=/2, "must be not equal to %{number}"},
  }

  # Relation tags recognized by the cast_*/put_* relation helpers.
  @relations [:embed, :assoc]
  # Accepted values for a constraint's :match option.
  @match_types [:exact, :suffix, :prefix]
@doc """
Wraps the given data in a changeset or adds changes to a changeset.
`changes` is a map or keyword where the key is an atom representing a
field, association or embed and the value is a term. Note the `value` is
directly stored in the changeset with no validation whatsoever. For this
reason, this function is meant for working with data internal to the
application.
When changing embeds and associations, see `put_assoc/4` for a complete
reference on the accepted values.
This function is useful for:
* wrapping a struct inside a changeset
* directly changing a struct without performing castings nor validations
* directly bulk-adding changes to a changeset
Changed attributes will only be added if the change does not have the
same value as the field in the data.
When a changeset is passed as the first argument, the changes passed as the
second argument are merged over the changes already in the changeset if they
differ from the values in the struct.
When a `{data, types}` is passed as the first argument, a changeset is
created with the given data and types and marked as valid.
See `cast/4` if you'd prefer to cast and validate external parameters.
## Examples
iex> changeset = change(%Post{})
%Ecto.Changeset{...}
iex> changeset.valid?
true
iex> changeset.changes
%{}
iex> changeset = change(%Post{author: "bar"}, title: "title")
iex> changeset.changes
%{title: "title"}
iex> changeset = change(%Post{title: "title"}, title: "title")
iex> changeset.changes
%{}
iex> changeset = change(changeset, %{title: "new title", body: "body"})
iex> changeset.changes.title
"new title"
iex> changeset.changes.body
"body"
"""
@spec change(Ecto.Schema.t | t | {data, types}, %{atom => term} | Keyword.t) :: t
def change(data, changes \\ %{})
def change({data, types}, changes) when is_map(data) do
change(%Changeset{data: data, types: Enum.into(types, %{}), valid?: true}, changes)
end
def change(%Changeset{types: nil}, _changes) do
raise ArgumentError, "changeset does not have types information"
end
def change(%Changeset{changes: changes, types: types} = changeset, new_changes)
when is_map(new_changes) or is_list(new_changes) do
{changes, errors, valid?} =
get_changed(changeset.data, types, changes, new_changes,
changeset.errors, changeset.valid?)
%{changeset | changes: changes, errors: errors, valid?: valid?}
end
def change(%{__struct__: struct} = data, changes) when is_map(changes) or is_list(changes) do
types = struct.__changeset__()
{changes, errors, valid?} = get_changed(data, types, %{}, changes, [], true)
%Changeset{valid?: valid?, data: data, changes: changes,
errors: errors, types: types}
end
defp get_changed(data, types, old_changes, new_changes, errors, valid?) do
Enum.reduce(new_changes, {old_changes, errors, valid?}, fn
{key, value}, {changes, errors, valid?} ->
put_change(data, changes, errors, valid?, key, value, Map.get(types, key))
end)
end
@doc """
Applies the given `params` as changes for the given `data` according to
the given set of `permitted` keys. Returns a changeset.
The given `data` may be either a changeset, a schema struct or a `{data, types}`
tuple. The second argument is a map of `params` that are cast according
to the type information from `data`. `params` is a map with string keys
or a map with atom keys, containing potentially invalid data. Mixed keys
are not allowed.
During casting, all `permitted` parameters whose values match the specified
type information will have their key name converted to an atom and stored
together with the value as a change in the `:changes` field of the changeset.
All parameters that are not explicitly permitted are ignored.
If casting of all fields is successful, the changeset is returned as valid.
Note that `cast/4` validates the types in the `params`, but not in the given
`data`.
## Options
* `:empty_values` - a list of values to be considered as empty when casting.
Empty values are always replaced by the default value of the respective key. Defaults to `[""]`
## Examples
iex> changeset = cast(post, params, [:title])
iex> if changeset.valid? do
...> Repo.update!(changeset)
...> end
Passing a changeset as the first argument:
iex> changeset = cast(post, %{title: "Hello"}, [:title])
iex> new_changeset = cast(changeset, %{title: "Foo", body: "World"}, [:body])
iex> new_changeset.params
%{"title" => "Hello", "body" => "World"}
Or creating a changeset from a simple map with types:
iex> data = %{title: "hello"}
iex> types = %{title: :string}
iex> changeset = cast({data, types}, %{title: "world"}, [:title])
iex> apply_changes(changeset)
%{title: "world"}
## Composing casts
`cast/4` also accepts a changeset as its first argument. In such cases, all
the effects caused by the call to `cast/4` (additional errors and changes)
are simply added to the ones already present in the argument changeset.
Parameters are merged (**not deep-merged**) and the ones passed to `cast/4`
take precedence over the ones already in the changeset.
"""
  @spec cast(Ecto.Schema.t | t | {data, types},
             %{binary => term} | %{atom => term} | :invalid,
             [atom],
             Keyword.t) :: t
  def cast(data, params, permitted, opts \\ [])

  # Structs are never valid parameter containers: reject them up front so
  # e.g. a schema struct passed as params fails loudly instead of leaking.
  def cast(_data, %{__struct__: _} = params, _permitted, _opts) do
    raise Ecto.CastError, type: :map, value: params,
      message: "expected params to be a :map, got: `#{inspect(params)}`"
  end

  # Schemaless {data, types} tuple: start with no prior changes.
  def cast({data, types}, params, permitted, opts) when is_map(data) do
    cast(data, types, %{}, params, permitted, opts)
  end

  # A changeset without type information cannot cast parameters.
  def cast(%Changeset{types: nil}, _params, _permitted, _opts) do
    raise ArgumentError, "changeset does not have types information"
  end

  # Re-casting an existing changeset: cast against the original data and then
  # merge the result into the given changeset (new params take precedence).
  def cast(%Changeset{changes: changes, data: data, types: types, empty_values: empty_values} = changeset,
           params, permitted, opts) do
    opts = Keyword.put_new(opts, :empty_values, empty_values)
    new_changeset = cast(data, types, changes, params, permitted, opts)
    cast_merge(changeset, new_changeset)
  end

  # Schema struct: derive the type map from the schema module.
  def cast(%{__struct__: module} = data, params, permitted, opts) do
    cast(data, module.__changeset__(), %{}, params, permitted, opts)
  end

  # :invalid params (e.g. non-map input upstream): produce an invalid
  # changeset with nil params, still validating that `permitted` is atoms.
  defp cast(%{} = data, %{} = types, %{} = changes, :invalid, permitted, _opts) when is_list(permitted) do
    _ = Enum.each(permitted, &cast_key/1)
    %Changeset{params: nil, data: data, valid?: false, errors: [],
               changes: changes, types: types}
  end

  # Main cast path: normalize params to string keys, then fold each permitted
  # field through process_param/7, accumulating changes and errors.
  defp cast(%{} = data, %{} = types, %{} = changes, %{} = params, permitted, opts) when is_list(permitted) do
    empty_values = Keyword.get(opts, :empty_values, @empty_values)
    params = convert_params(params)

    # Empty values fall back to the struct default (or nil for plain maps).
    defaults = case data do
      %{__struct__: struct} -> struct.__struct__()
      %{} -> %{}
    end

    {changes, errors, valid?} =
      Enum.reduce(permitted, {changes, [], true},
                  &process_param(&1, params, types, data, empty_values, defaults, &2))

    # Errors were accumulated by prepending, so reverse to restore field order.
    %Changeset{params: params, data: data, valid?: valid?,
               errors: Enum.reverse(errors), changes: changes, types: types}
  end

  # Anything else as params (list, binary, ...) is a cast error.
  defp cast(%{}, %{}, %{}, params, permitted, _opts) when is_list(permitted) do
    raise Ecto.CastError, type: :map, value: params,
      message: "expected params to be a :map, got: `#{inspect params}`"
  end
  # Casts a single permitted field: resolves its type, finds the current value
  # (pending change first, then the underlying data), and folds the cast
  # result into the {changes, errors, valid?} accumulator.
  defp process_param(key, params, types, data, empty_values, defaults, {changes, errors, valid?}) do
    {key, param_key} = cast_key(key)
    type = cast_type!(types, key)

    # Prefer an already-staged change over the value stored in `data`.
    current =
      case changes do
        %{^key => value} -> value
        _ -> Map.get(data, key)
      end

    case cast_field(key, param_key, type, params, current, empty_values, defaults, valid?) do
      {:ok, value, valid?} ->
        {Map.put(changes, key, value), errors, valid?}

      # Param absent or equal to the current value: nothing to record.
      :missing ->
        {changes, errors, valid?}

      {:invalid, custom_errors} ->
        # Build the error metadata; custom messages/keys from the type's cast
        # take precedence, with :validation and :type always attached.
        {message, new_errors} =
          custom_errors
          |> Keyword.put_new(:validation, :cast)
          |> Keyword.put(:type, type)
          |> Keyword.pop(:message, "is invalid")

        {changes, [{key, {message, new_errors}} | errors], false}
    end
  end
defp cast_type!(types, key) do
case types do
%{^key => {tag, _}} when tag in @relations ->
raise "casting #{tag}s with cast/4 for #{inspect key} field is not supported, use cast_#{tag}/3 instead"
%{^key => type} ->
type
_ ->
known_fields = types |> Map.keys() |> Enum.map_join(", ", &inspect/1)
raise ArgumentError,
"unknown field `#{inspect(key)}` given to cast. Either the field does not exist or it is a " <>
":through association (which are read-only). The known fields are: #{known_fields}"
end
end
defp cast_key(key) when is_atom(key),
do: {key, Atom.to_string(key)}
defp cast_key(key),
do: raise ArgumentError, "cast/3 expects a list of atom keys, got: `#{inspect key}`"
  # Casts one field's raw param value. Returns {:ok, value, valid?} for a new
  # value, :missing when the param is absent or equals the current value, or
  # {:invalid, custom_errors} when the type cast fails.
  defp cast_field(key, param_key, type, params, current, empty_values, defaults, valid?) do
    case params do
      %{^param_key => value} ->
        # Empty values (e.g. "") are replaced by the field's default.
        value = if value in empty_values, do: Map.get(defaults, key), else: value

        case Ecto.Type.cast(type, value) do
          {:ok, value} ->
            # Type-aware equality: equal values produce no change at all.
            if Ecto.Type.equal?(type, current, value) do
              :missing
            else
              {:ok, value, valid?}
            end

          :error ->
            {:invalid, []}

          # Custom types may return keyword metadata describing the failure.
          {:error, custom_errors} when is_list(custom_errors) ->
            {:invalid, custom_errors}
        end

      _ ->
        :missing
    end
  end
  # Normalizes params to a map with string keys, raising on mixed atom/string
  # keys. Two implementations are compiled depending on OTP support for map
  # iterators (available when :maps.iterator/1 exists).
  # TODO: Remove branch when we require Elixir v1.10+.
  if Code.ensure_loaded?(:maps) and function_exported?(:maps, :iterator, 1) do
    # We only look at the first element because traversing the whole map
    # can be expensive and it was showing up during profiling. This means
    # we won't always raise, but the check only exists for user convenience
    # anyway, and it is not a guarantee.
    defp convert_params(params) do
      case :maps.next(:maps.iterator(params)) do
        {key, _, _} when is_atom(key) ->
          # First key is an atom: stringify all keys, raising if any key
          # turns out not to be an atom (i.e. the map has mixed keys).
          for {key, value} <- params, into: %{} do
            if is_atom(key) do
              {Atom.to_string(key), value}
            else
              raise Ecto.CastError, type: :map, value: params,
                message: "expected params to be a map with atoms or string keys, " <>
                         "got a map with mixed keys: #{inspect params}"
            end
          end

        # First key is not an atom (or the map is empty): assume string keys
        # and return the params untouched.
        _ ->
          params
      end
    end
  else
    # Fallback without map iterators: a single reduce that tracks whether we
    # have seen atom keys (accumulating converted pairs) or binary keys (nil).
    defp convert_params(params) do
      params
      |> Enum.reduce(nil, fn
        {key, _value}, nil when is_binary(key) ->
          nil

        # A binary key after atom keys were collected means mixed keys.
        {key, _value}, _ when is_binary(key) ->
          raise Ecto.CastError, type: :map, value: params,
            message: "expected params to be a map with atoms or string keys, " <>
                     "got a map with mixed keys: #{inspect params}"

        {key, value}, nil when is_atom(key) ->
          [{Atom.to_string(key), value}]

        {key, value}, acc when is_atom(key) ->
          [{Atom.to_string(key), value} | acc]
      end)
      |> case do
        # nil accumulator: keys were already strings, return as-is.
        nil -> params
        list -> :maps.from_list(list)
      end
    end
  end
## Casting related
@doc """
Casts the given association with the changeset parameters.
This function should be used when working with the entire association at
once (and not a single element of a many-style association) and receiving
data external to the application.
`cast_assoc/3` works matching the records extracted from the database
and compares it with the parameters received from an external source.
Therefore, it is expected that the data in the changeset has explicitly
preloaded the association being cast and that all of the IDs exist and
are unique.
For example, imagine a user has many addresses relationship where
post data is sent as follows
%{"name" => "john doe", "addresses" => [
%{"street" => "somewhere", "country" => "brazil", "id" => 1},
%{"street" => "elsewhere", "country" => "poland"},
]}
and then
User
|> Repo.get!(id)
|> Repo.preload(:addresses) # Only required when updating data
|> Ecto.Changeset.cast(params, [])
|> Ecto.Changeset.cast_assoc(:addresses, with: &MyApp.Address.changeset/2)
The parameters for the given association will be retrieved
from `changeset.params`. Those parameters are expected to be
a map with attributes, similar to the ones passed to `cast/4`.
Once parameters are retrieved, `cast_assoc/3` will match those
parameters with the associations already in the changeset record.
Once `cast_assoc/3` is called, Ecto will compare each parameter
with the user's already preloaded addresses and act as follows:
* If the parameter does not contain an ID, the parameter data
will be passed to `MyApp.Address.changeset/2` with a new struct
and become an insert operation
* If the parameter contains an ID and there is no associated child
with such ID, the parameter data will be passed to
`MyApp.Address.changeset/2` with a new struct and become an insert
operation
* If the parameter contains an ID and there is an associated child
with such ID, the parameter data will be passed to
`MyApp.Address.changeset/2` with the existing struct and become an
update operation
* If there is an associated child with an ID and its ID is not given
as parameter, the `:on_replace` callback for that association will
be invoked (see the "On replace" section on the module documentation)
Every time the `MyApp.Address.changeset/2` function is invoked, it must
return a changeset. Once the parent changeset is given to an `Ecto.Repo`
function, all entries will be inserted/updated/deleted within the same
transaction.
Note developers are allowed to explicitly set the `:action` field of a
changeset to instruct Ecto how to act in certain situations. Let's suppose
that, if one of the associations has only empty fields, you want to ignore
the entry altogether instead of showing an error. The changeset function could
be written like this:
def changeset(struct, params) do
struct
|> cast(params, [:title, :body])
|> validate_required([:title, :body])
|> case do
%{valid?: false, changes: changes} = changeset when changes == %{} ->
# If the changeset is invalid and has no changes, it is
# because all required fields are missing, so we ignore it.
%{changeset | action: :ignore}
changeset ->
changeset
end
end
## Partial changes for many-style associations
By preloading an association using a custom query you can confine the behavior
of `cast_assoc/3`. This opens up the possibility to work on a subset of the data,
instead of all associations in the database.
Taking the initial example of users having addresses imagine those addresses
are set up to belong to a country. If you want to allow users to bulk edit all
addresses that belong to a single country, you can do so by changing the preload
query:
query = from MyApp.Address, where: [country: ^edit_country]
User
|> Repo.get!(id)
|> Repo.preload(addresses: query)
|> Ecto.Changeset.cast(params, [])
|> Ecto.Changeset.cast_assoc(:addresses)
This will allow you to cast and update only the association for the given country.
The important point for partial changes is that any addresses, which were not
preloaded won't be changed.
## Options
* `:required` - if the association is a required field
* `:required_message` - the message on failure, defaults to "can't be blank"
* `:invalid_message` - the message on failure, defaults to "is invalid"
* `:force_update_on_change` - force the parent record to be updated in the
repository if there is a change, defaults to `true`
* `:with` - the function to build the changeset from params. Defaults to the
`changeset/2` function of the associated module. It can be changed by passing
an anonymous function or an MFA tuple. If using an MFA, the default changeset
and parameters arguments will be prepended to the given args. For example,
using `with: {Author, :special_changeset, ["hello"]}` will be invoked as
`Author.special_changeset(changeset, params, "hello")`
"""
  def cast_assoc(changeset, name, opts \\ []) when is_atom(name) do
    # Shares the implementation with cast_embed/3; the :assoc tag selects the
    # association branch in the relation helpers.
    cast_relation(:assoc, changeset, name, opts)
  end
@doc """
Casts the given embed with the changeset parameters.
The parameters for the given embed will be retrieved
from `changeset.params`. Those parameters are expected to be
a map with attributes, similar to the ones passed to `cast/4`.
Once parameters are retrieved, `cast_embed/3` will match those
parameters with the embeds already in the changeset record.
See `cast_assoc/3` for an example of working with casts and
associations which would also apply for embeds.
The changeset must have been previously `cast` using
`cast/4` before this function is invoked.
## Options
* `:required` - if the embed is a required field
* `:required_message` - the message on failure, defaults to "can't be blank"
* `:invalid_message` - the message on failure, defaults to "is invalid"
* `:force_update_on_change` - force the parent record to be updated in the
repository if there is a change, defaults to `true`
* `:with` - the function to build the changeset from params. Defaults to the
`changeset/2` function of the embedded module. It can be changed by passing
an anonymous function or an MFA tuple. If using an MFA, the default changeset
and parameters arguments will be prepended to the given args. For example,
using `with: {Author, :special_changeset, ["hello"]}` will be invoked as
`Author.special_changeset(changeset, params, "hello")`
"""
  def cast_embed(changeset, name, opts \\ []) when is_atom(name) do
    # Shares the implementation with cast_assoc/3; the :embed tag selects the
    # embed branch in the relation helpers.
    cast_relation(:embed, changeset, name, opts)
  end
  # cast_assoc/cast_embed require a changeset produced by cast/4 (which sets
  # both :data and :types); reject anything else with a helpful message.
  defp cast_relation(type, %Changeset{data: data, types: types}, _name, _opts)
       when data == nil or types == nil do
    raise ArgumentError, "cast_#{type}/3 expects the changeset to be cast. " <>
                         "Please call cast/4 before calling cast_#{type}/3"
  end

  # Shared implementation for cast_assoc/3 and cast_embed/3.
  defp cast_relation(type, %Changeset{} = changeset, key, opts) do
    {key, param_key} = cast_key(key)
    %{data: data, types: types, params: params, changes: changes} = changeset
    %{related: related} = relation = relation!(:cast, type, key, Map.get(types, key))
    params = params || %{}

    # Track the field as required when asked, so errors render consistently.
    {changeset, required?} =
      if opts[:required] do
        {update_in(changeset.required, &[key|&1]), true}
      else
        {changeset, false}
      end

    # The changeset function defaults to related.changeset/2 unless :with is given.
    on_cast = Keyword.get_lazy(opts, :with, fn -> on_cast_default(type, related) end)
    original = Map.get(data, key)

    changeset =
      case Map.fetch(params, param_key) do
        {:ok, value} ->
          current = Relation.load!(data, original)
          case Relation.cast(relation, data, value, current, on_cast) do
            # A real change: record it, combine validity, and force the parent
            # update unless :force_update_on_change is disabled.
            {:ok, change, relation_valid?} when change != original ->
              valid? = changeset.valid? and relation_valid?
              changes = Map.put(changes, key, change)
              changeset = %{force_update(changeset, opts) | changes: changes, valid?: valid?}
              missing_relation(changeset, key, current, required?, relation, opts)

            {:error, {message, meta}} ->
              meta = [validation: type] ++ meta
              error = {key, {message(opts, :invalid_message, message), meta}}
              %{changeset | errors: [error | changeset.errors], valid?: false}

            # ignore or ok with change == original
            _ ->
              missing_relation(changeset, key, current, required?, relation, opts)
          end

        # Param not sent at all: only the required? check may add an error.
        :error ->
          missing_relation(changeset, key, original, required?, relation, opts)
      end

    # Remember the resolved on_cast function in the cached type info so later
    # operations on this relation reuse it.
    update_in changeset.types[key], fn {type, relation} ->
      {type, %{relation | on_cast: on_cast}}
    end
  end
  # Builds the default relation-cast function: `related_module.changeset/2`.
  # If that function is missing, the UndefinedFunctionError is converted into
  # an actionable ArgumentError -- but only when the *top* stack frame is the
  # missing module.changeset/2 itself; any other failure is re-raised as-is.
  defp on_cast_default(type, module) do
    fn struct, params ->
      try do
        module.changeset(struct, params)
      rescue
        e in UndefinedFunctionError ->
          case __STACKTRACE__ do
            # Two `when`s express OR: the frame carries either the arity (2)
            # or the actual argument list (length 2), depending on the runtime.
            [{^module, :changeset, args_or_arity, _}] when args_or_arity == 2
                                                      when length(args_or_arity) == 2 ->
              raise ArgumentError, """
              the module #{inspect module} does not define a changeset/2 function,
              which is used by cast_#{type}/3. You need to either:

                1. implement the #{type}.changeset/2 function
                2. pass the :with option to cast_#{type}/3 with an anonymous
                   function that expects 2 args or an MFA tuple

              When using an inline embed, the :with option must be given
              """

            stacktrace ->
              reraise e, stacktrace
          end
      end
    end
  end
defp missing_relation(%{changes: changes, errors: errors} = changeset,
name, current, required?, relation, opts) do
current_changes = Map.get(changes, name, current)
if required? and Relation.empty?(relation, current_changes) do
errors = [{name, {message(opts, :required_message, "can't be blank"), [validation: :required]}} | errors]
%{changeset | errors: errors, valid?: false}
else
changeset
end
end
  # Note the repeated `type` binding: this clause matches only when the tag
  # stored in the types map equals the expected relation kind (:assoc or
  # :embed), in which case the relation metadata is returned.
  defp relation!(_op, type, _name, {type, relation}),
    do: relation

  # No such field at all -- association-specific wording.
  defp relation!(op, :assoc, name, nil),
    do: raise(ArgumentError, "cannot #{op} assoc `#{name}`, assoc `#{name}` not found. Make sure it is spelled correctly and that the association type is not read-only")

  # No such field at all -- generic (embed) wording.
  defp relation!(op, type, name, nil),
    do: raise(ArgumentError, "cannot #{op} #{type} `#{name}`, #{type} `#{name}` not found. Make sure that it exists and is spelled correctly")

  # The field is a relation, but of the other kind (asked :assoc, got :embed
  # or vice versa).
  defp relation!(op, type, name, {other, _}) when other in @relations,
    do: raise(ArgumentError, "expected `#{name}` to be an #{type} in `#{op}_#{type}`, got: `#{other}`")

  # The field exists but is a plain schema type, not a relation.
  defp relation!(op, type, name, schema_type),
    do: raise(ArgumentError, "expected `#{name}` to be an #{type} in `#{op}_#{type}`, got: `#{inspect schema_type}`")
defp force_update(changeset, opts) do
if Keyword.get(opts, :force_update_on_change, true) do
put_in(changeset.repo_opts[:force], true)
else
changeset
end
end
## Working with changesets
@doc """
Merges two changesets.
This function merges two changesets provided they have been applied to the
same data (their `:data` field is equal); if the data differs, an
`ArgumentError` exception is raised. If one of the changesets has a `:repo`
field which is not `nil`, then the value of that field is used as the `:repo`
field of the resulting changeset; if both changesets have a non-`nil` and
different `:repo` field, an `ArgumentError` exception is raised.
The other fields are merged with the following criteria:
* `params` - params are merged (not deep-merged) giving precedence to the
params of `changeset2` in case of a conflict. If both changesets have their
`:params` fields set to `nil`, the resulting changeset will have its params
set to `nil` too.
* `changes` - changes are merged giving precedence to the `changeset2`
changes.
* `errors` and `validations` - they are simply concatenated.
* `required` - required fields are merged; all the fields that appear
in the required list of both changesets are moved to the required
list of the resulting changeset.
## Examples
iex> changeset1 = cast(%Post{}, %{title: "Title"}, [:title])
iex> changeset2 = cast(%Post{}, %{title: "New title", body: "Body"}, [:title, :body])
iex> changeset = merge(changeset1, changeset2)
iex> changeset.changes
%{body: "Body", title: "New title"}
iex> changeset1 = cast(%Post{body: "Body"}, %{title: "Title"}, [:title])
iex> changeset2 = cast(%Post{}, %{title: "New title"}, [:title])
iex> merge(changeset1, changeset2)
** (ArgumentError) different :data when merging changesets
"""
@spec merge(t, t) :: t
def merge(changeset1, changeset2)
# Both changesets must have been built from the same underlying data:
# the repeated `data` binding in the heads enforces equality.
def merge(%Changeset{data: data} = cs1, %Changeset{data: data} = cs2) do
  merged = %{
    cs1
    | repo: merge_identical(cs1.repo, cs2.repo, "repos"),
      repo_opts: Keyword.merge(cs1.repo_opts, cs2.repo_opts),
      action: merge_identical(cs1.action, cs2.action, "actions"),
      filters: Map.merge(cs1.filters, cs2.filters),
      validations: cs1.validations ++ cs2.validations,
      constraints: cs1.constraints ++ cs2.constraints
  }
  # Params/changes/errors/required/types/valid? are merged separately.
  cast_merge(merged, cs2)
end
def merge(%Changeset{}, %Changeset{}) do
  raise ArgumentError, message: "different :data when merging changesets"
end
# Merges the cast-related fields of two changesets, giving precedence to
# `cs2` for conflicting params and changes. Params stay `nil` only when
# both sides have `nil` params.
defp cast_merge(cs1, cs2) do
  merged_params =
    if cs1.params || cs2.params do
      Map.merge(cs1.params || %{}, cs2.params || %{})
    end
  %{
    cs1
    | params: merged_params,
      valid?: cs1.valid? and cs2.valid?,
      errors: Enum.uniq(cs1.errors ++ cs2.errors),
      types: cs1.types || cs2.types,
      changes: Map.merge(cs1.changes, cs2.changes),
      required: Enum.uniq(cs1.required ++ cs2.required)
  }
end
# Merges two scalar changeset attributes (repo, action) that must either
# agree or have at most one side set; raises when they conflict.
defp merge_identical(value, nil, _label), do: value
defp merge_identical(nil, value, _label), do: value
defp merge_identical(value, value, _label), do: value
defp merge_identical(lhs, rhs, label) do
  raise ArgumentError, "different #{label} (`#{inspect lhs}` and " <>
                       "`#{inspect rhs}`) when merging changesets"
end
@doc """
Fetches the given field from changes or from the data.
While `fetch_change/2` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the data, finally returning `:error` if
no value is available.
For relations, these functions will return the changeset
original data with changes applied. To retrieve raw changesets,
please use `fetch_change/2`.
## Examples
iex> post = %Post{title: "Foo", body: "Bar baz bong"}
iex> changeset = change(post, %{title: "New title"})
iex> fetch_field(changeset, :title)
{:changes, "New title"}
iex> fetch_field(changeset, :body)
{:data, "Bar baz bong"}
iex> fetch_field(changeset, :not_a_field)
:error
"""
@spec fetch_field(t, atom) :: {:changes, term} | {:data, term} | :error
def fetch_field(%Changeset{changes: changes, data: data, types: types}, key) when is_atom(key) do
  # Prefer the pending change; fall back to the underlying data;
  # return :error when the key is present in neither.
  case changes do
    %{^key => value} ->
      {:changes, change_as_field(types, key, value)}
    %{} ->
      case data do
        %{^key => value} -> {:data, data_as_field(data, types, key, value)}
        %{} -> :error
      end
  end
end
@doc """
Same as `fetch_field/2` but returns the value or raises if the given key was not found.
## Examples
iex> post = %Post{title: "Foo", body: "Bar baz bong"}
iex> changeset = change(post, %{title: "New title"})
iex> fetch_field!(changeset, :title)
"New title"
iex> fetch_field!(changeset, :other)
** (KeyError) key :other not found in: %Post{...}
"""
@spec fetch_field!(t, atom) :: term
def fetch_field!(changeset, key) do
  # Unwrap {:changes, value} / {:data, value}; raise KeyError otherwise.
  case fetch_field(changeset, key) do
    {_source, value} -> value
    :error -> raise KeyError, key: key, term: changeset.data
  end
end
@doc """
Gets a field from changes or from the data.
While `get_change/3` only looks at the current `changes`
to retrieve a value, this function looks at the changes and
then falls back on the data, finally returning `default` if
no value is available.
For relations, these functions will return the changeset data
with changes applied. To retrieve raw changesets, please use `get_change/3`.
iex> post = %Post{title: "A title", body: "My body is a cage"}
iex> changeset = change(post, %{title: "A new title"})
iex> get_field(changeset, :title)
"A new title"
iex> get_field(changeset, :not_a_field, "Told you, not a field!")
"Told you, not a field!"
"""
@spec get_field(t, atom, term) :: term
def get_field(%Changeset{changes: changes, data: data, types: types}, key, default \\ nil) do
  # Pending change wins over data; `default` only when neither has the key.
  case changes do
    %{^key => value} ->
      change_as_field(types, key, value)
    %{} ->
      case data do
        %{^key => value} -> data_as_field(data, types, key, value)
        %{} -> default
      end
  end
end
# Converts a *changed* value for `key` into a field value: relation
# changes (nested changesets) are applied so callers see plain
# structs/maps; any other change is returned as-is.
defp change_as_field(types, key, value) do
  case Map.get(types, key) do
    {tag, relation} when tag in @relations ->
      Relation.apply_changes(relation, value)
    _other ->
      value
  end
end
# Converts a value read from the underlying data: relation fields must be
# loaded (`Relation.load!` raises if the association was not preloaded);
# other fields are returned as-is.
defp data_as_field(data, types, key, value) do
  case Map.get(types, key) do
    {tag, _relation} when tag in @relations ->
      Relation.load!(data, value)
    _other ->
      value
  end
end
@doc """
Fetches a change from the given changeset.
This function only looks at the `:changes` field of the given `changeset` and
returns `{:ok, value}` if the change is present or `:error` if it's not.
## Examples
iex> changeset = change(%Post{body: "foo"}, %{title: "bar"})
iex> fetch_change(changeset, :title)
{:ok, "bar"}
iex> fetch_change(changeset, :body)
:error
"""
@spec fetch_change(t, atom) :: {:ok, term} | :error
def fetch_change(%Changeset{} = changeset, key) when is_atom(key) do
  # Looks only at pending changes — never falls back to the data.
  Map.fetch(changeset.changes, key)
end
@doc """
Same as `fetch_change/2` but returns the value or raises if the given key was not found.
## Examples
iex> changeset = change(%Post{body: "foo"}, %{title: "bar"})
iex> fetch_change!(changeset, :title)
"bar"
iex> fetch_change!(changeset, :body)
** (KeyError) key :body not found in: %{title: "bar"}
"""
@spec fetch_change!(t, atom) :: term
def fetch_change!(changeset, key) do
  # Unwrap the change or raise KeyError with the changes map as context.
  case fetch_change(changeset, key) do
    {:ok, value} -> value
    :error -> raise KeyError, key: key, term: changeset.changes
  end
end
@doc """
Gets a change or returns a default value.
## Examples
iex> changeset = change(%Post{body: "foo"}, %{title: "bar"})
iex> get_change(changeset, :title)
"bar"
iex> get_change(changeset, :body)
nil
"""
@spec get_change(t, atom, term) :: term
def get_change(%Changeset{} = changeset, key, default \\ nil) when is_atom(key) do
  # Only pending changes are consulted; missing keys yield `default`.
  Map.get(changeset.changes, key, default)
end
@doc """
Updates a change.
The given `function` is invoked with the change value only if there
is a change for the given `key`. Note that the value of the change
can still be `nil` (unless the field was marked as required on `validate_required/3`).
## Examples
iex> changeset = change(%Post{}, %{impressions: 1})
iex> changeset = update_change(changeset, :impressions, &(&1 + 1))
iex> changeset.changes.impressions
2
"""
@spec update_change(t, atom, (term -> term)) :: t
def update_change(%Changeset{changes: changes} = changeset, key, function) when is_atom(key) do
  # Invoke `function` only when a change exists; otherwise no-op.
  case changes do
    %{^key => value} -> put_change(changeset, key, function.(value))
    %{} -> changeset
  end
end
@doc """
Puts a change on the given `key` with `value`.
`key` is an atom that represents any field, embed or
association in the changeset. Note the `value` is directly
stored in the changeset with no validation whatsoever.
For this reason, this function is meant for working with
data internal to the application.
If the change is already present, it is overridden with
the new value. If the change has the same value as in the
changeset data, it is not added to the list of changes.
When changing embeds and associations, see `put_assoc/4`
for a complete reference on the accepted values.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> changeset = put_change(changeset, :title, "bar")
iex> changeset.changes
%{title: "bar"}
iex> changeset = put_change(changeset, :author, "bar")
iex> changeset.changes
%{title: "bar"}
"""
@spec put_change(t, atom, term) :: t
# Without type metadata we cannot tell fields and relations apart.
def put_change(%Changeset{types: nil}, _key, _value) do
  raise ArgumentError, "changeset does not have types information"
end
def put_change(%Changeset{} = changeset, key, value) do
  %{data: data, types: types, changes: changes, errors: errors, valid?: valid?} = changeset
  # Delegate to the private put_change/7, which dispatches on the
  # field's type (relation vs plain field vs unknown).
  {changes, errors, valid?} =
    put_change(data, changes, errors, valid?, key, value, Map.get(types, key))
  %{changeset | changes: changes, errors: errors, valid?: valid?}
end
# put_change/7 clause for embeds/assocs: runs the value through the
# relation machinery, which diffs it against the currently loaded data.
defp put_change(data, changes, errors, valid?, key, value, {tag, relation})
    when tag in @relations do
  original = Map.get(data, key)
  # Raises if the relation has not been loaded/preloaded.
  current = Relation.load!(data, original)
  case Relation.change(relation, value, current) do
    {:ok, change, relation_valid?} when change != original ->
      {Map.put(changes, key, change), errors, valid? and relation_valid?}
    {:error, error} ->
      {changes, [{key, error} | errors], false}
    # ignore or ok with change == original
    _ ->
      {Map.delete(changes, key), errors, valid?}
  end
end
# Unknown field: the key is an atom but has no type entry.
defp put_change(data, _changes, _errors, _valid?, key, _value, nil) when is_atom(key) do
  raise ArgumentError, "unknown field `#{inspect(key)}` in #{inspect(data)}"
end
# Non-atom keys are never valid field names.
defp put_change(_data, _changes, _errors, _valid?, key, _value, nil) when not is_atom(key) do
  raise ArgumentError, "field names given to change/put_change must be atoms, got: `#{inspect(key)}`"
end
# Plain field: record the change only when it differs from the data
# according to the type's equality semantics (e.g. decimals compare
# by value, not representation).
defp put_change(data, changes, errors, valid?, key, value, type) do
  if not Ecto.Type.equal?(type, Map.get(data, key), value) do
    {Map.put(changes, key, value), errors, valid?}
  else
    {Map.delete(changes, key), errors, valid?}
  end
end
@doc """
Puts the given association entry or entries as a change in the changeset.
This function is used to work with associations as a whole. For example,
if a Post has many Comments, it allows you to add, remove or change all
comments at once. If your goal is to simply add a new comment to a post,
then it is preferred to do so manually, as we will describe later in the
"Example: Adding a comment to a post" section.
This function requires the associated data to have been preloaded, except
when the parent changeset has been newly built and not yet persisted.
Missing data will invoke the `:on_replace` behaviour defined on the
association.
For associations with cardinality one, `nil` can be used to remove the existing
entry. For associations with many entries, an empty list may be given instead.
If the association has no changes, it will be skipped. If the association is
invalid, the changeset will be marked as invalid. If the given value is not any
of values below, it will raise.
The associated data may be given in different formats:
* a map or a keyword list representing changes to be applied to the
associated data. A map or keyword list can be given to update the
associated data as long as they have matching primary keys.
For example, `put_assoc(changeset, :comments, [%{id: 1, title: "changed"}])`
will locate the comment with `:id` of 1 and update its title.
If no comment with such id exists, one is created on the fly.
Since only a single comment was given, any other associated comment
will be replaced. On all cases, it is expected the keys to be atoms.
Opposite to `cast_assoc` and `cast_embed`, the given map (or struct)
is not validated in any way and will be inserted as is.
This API is mostly used in scripts and tests, to make it straight-
forward to create schemas with associations at once, such as:
Ecto.Changeset.change(
%Post{},
title: "foo",
comments: [
%{body: "first"},
%{body: "second"}
]
)
* changesets or structs - when a changeset or struct is given, they
are treated as the canonical data and the associated data currently
stored in the association is ignored. For instance, the operation
`put_assoc(changeset, :comments, [%Comment{id: 1, title: "changed"}])`
will send the `Comment` as is to the database, ignoring any comment
currently associated, even if a matching ID is found. If the comment
is already persisted to the database, then `put_assoc/4` only takes
care of guaranteeing that the comments and the parent data are associated.
This is extremely useful when associating existing data, as we will see
in the "Example: Adding tags to a post" section.
Once the parent changeset is given to an `Ecto.Repo` function, all entries
will be inserted/updated/deleted within the same transaction.
## Example: Adding a comment to a post
Imagine a relationship where Post has many comments and you want to add a
new comment to an existing post. While it is possible to use `put_assoc/4`
for this, it would be unnecessarily complex. Let's see an example.
First, let's fetch the post with all existing comments:
post = Post |> Repo.get!(1) |> Repo.preload(:comments)
The following approach is **wrong**:
post
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_assoc(:comments, [%Comment{body: "bad example!"}])
|> Repo.update!()
The reason why the example above is wrong is because `put_assoc/4` always
works with the **full data**. So the example above will effectively **erase
all previous comments** and only keep the comment you are currently adding.
Instead, you could try:
post
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_assoc(:comments, [%Comment{body: "so-so example!"} | post.comments])
|> Repo.update!()
In this example, we prepend the new comment to the list of existing comments.
Ecto will diff the list of comments currently in `post` with the list of comments
given, and correctly insert the new comment to the database. Note, however,
Ecto is doing a lot of work just to figure out something we knew since the
beginning, which is that there is only one new comment.
In cases like above, when you want to work only on a single entry, it is
much easier to simply work on the association directly. For example, we
could instead set the `post` association in the comment:
%Comment{body: "better example"}
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_assoc(:post, post)
|> Repo.insert!()
Alternatively, we can make sure that when we create a comment, it is already
associated to the post:
Ecto.build_assoc(post, :comments)
|> Ecto.Changeset.change(body: "great example!")
|> Repo.insert!()
Or we can simply set the post_id in the comment itself:
%Comment{body: "better example", post_id: post.id}
|> Repo.insert!()
In other words, when you find yourself wanting to work only with a subset
of the data, then using `put_assoc/4` is most likely unnecessary. Instead,
you want to work on the other side of the association.
Let's see an example where using `put_assoc/4` is a good fit.
## Example: Adding tags to a post
Imagine you are receiving a set of tags you want to associate to a post.
Let's imagine that those tags exist upfront and are all persisted to the
database. Imagine we get the data in this format:
params = %{"title" => "new post", "tags" => ["learner"]}
Now, since the tags already exist, we will bring all of them from the
database and put them directly in the post:
tags = Repo.all(from t in Tag, where: t.name in ^params["tags"])
post
|> Repo.preload(:tags)
|> Ecto.Changeset.cast(params, [:title]) # No need to allow :tags as we put them directly
|> Ecto.Changeset.put_assoc(:tags, tags) # Explicitly set the tags
Since in this case we always require the user to pass all tags
directly, using `put_assoc/4` is a great fit. It will automatically
remove any tag not given and properly associate all of the given
tags with the post.
Furthermore, since the tag information is given as structs read directly
from the database, Ecto will treat the data as correct and only do the
minimum necessary to guarantee that posts and tags are associated,
without trying to update or diff any of the fields in the tag struct.
Although it accepts an `opts` argument, there are no options currently
supported by `put_assoc/4`.
"""
# Associations share the generic relation plumbing with embeds.
def put_assoc(%Changeset{} = changeset, name, value, opts \\ []),
  do: put_relation(:assoc, changeset, name, value, opts)
@doc """
Puts the given embed entry or entries as a change in the changeset.
This function is used to work with embeds as a whole. For embeds with
cardinality one, `nil` can be used to remove the existing entry. For
embeds with many entries, an empty list may be given instead.
If the embed has no changes, it will be skipped. If the embed is
invalid, the changeset will be marked as invalid.
The list of supported values and their behaviour is described in
`put_assoc/4`. If the given value is not any of values listed there,
it will raise.
Although this function accepts an `opts` argument, there are no options
currently supported by `put_embed/4`.
"""
# Embeds share the generic relation plumbing with associations.
def put_embed(%Changeset{} = changeset, name, value, opts \\ []),
  do: put_relation(:embed, changeset, name, value, opts)
# Shared implementation of put_assoc/put_embed: validates the field is a
# relation of kind `tag` and stores the new value through put_change/7.
defp put_relation(_tag, %{types: nil}, _name, _value, _opts) do
  raise ArgumentError, "changeset does not have types information"
end
defp put_relation(tag, changeset, name, value, _opts) do
  %{data: data, types: types} = changeset
  relation = relation!(:put, tag, name, Map.get(types, name))
  {changes, errors, valid?} =
    put_change(data, changeset.changes, changeset.errors, changeset.valid?, name, value, {tag, relation})
  %{changeset | changes: changes, errors: errors, valid?: valid?}
end
@doc """
Forces a change on the given `key` with `value`.
If the change is already present, it is overridden with
the new value.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> changeset = force_change(changeset, :title, "bar")
iex> changeset.changes
%{title: "bar"}
iex> changeset = force_change(changeset, :author, "bar")
iex> changeset.changes
%{title: "bar", author: "bar"}
"""
@spec force_change(t, atom, term) :: t
# Without type metadata we cannot distinguish fields from relations.
def force_change(%Changeset{types: nil}, _key, _value) do
  raise ArgumentError, "changeset does not have types information"
end
def force_change(%Changeset{types: types} = changeset, key, value) do
  case Map.get(types, key) do
    # Relations must go through put_assoc/put_embed so diffing applies.
    {tag, _} when tag in @relations ->
      raise "changing #{tag}s with force_change/3 is not supported, " <>
        "please use put_#{tag}/4 instead"
    nil ->
      raise ArgumentError, "unknown field `#{inspect(key)}` in #{inspect(changeset.data)}"
    _ ->
      # Unconditionally record the change, even when equal to the data —
      # this is what distinguishes force_change/3 from put_change/3.
      put_in changeset.changes[key], value
  end
end
@doc """
Deletes a change with the given key.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = delete_change(changeset, :title)
iex> get_change(changeset, :title)
nil
"""
@spec delete_change(t, atom) :: t
def delete_change(%Changeset{changes: changes} = changeset, key) when is_atom(key) do
  # Removing a change does not touch errors/validity accumulated so far.
  %{changeset | changes: Map.delete(changes, key)}
end
@doc """
Applies the changeset changes to the changeset data.
This operation will return the underlying data with changes
regardless if the changeset is valid or not.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> apply_changes(changeset)
%Post{author: "bar", title: "foo"}
"""
@spec apply_changes(t) :: Ecto.Schema.t | data
# Fast path: no changes, return the underlying data untouched.
def apply_changes(%Changeset{changes: changes, data: data}) when changes == %{} do
  data
end
def apply_changes(%Changeset{changes: changes, data: data, types: types}) do
  Enum.reduce(changes, data, fn {key, value}, acc ->
    case Map.fetch(types, key) do
      # Relations apply their nested changesets recursively.
      {:ok, {tag, relation}} when tag in @relations ->
        apply_relation_changes(acc, key, relation, value)
      {:ok, _} ->
        Map.put(acc, key, value)
      # Changes without a known type are silently skipped.
      :error ->
        acc
    end
  end)
end
@doc """
Applies the changeset action only if the changes are valid.
If the changes are valid, all changes are applied to the changeset data.
If the changes are invalid, no changes are applied, and an error tuple
is returned with the changeset containing the action that was attempted
to be applied.
The action may be any atom.
## Examples
iex> {:ok, data} = apply_action(changeset, :update)
iex> {:error, changeset} = apply_action(changeset, :update)
%Ecto.Changeset{action: :update}
"""
@spec apply_action(t, atom) :: {:ok, Ecto.Schema.t() | data} | {:error, t}
# Valid changeset: apply the changes and return the resulting data.
def apply_action(%Changeset{valid?: true} = changeset, action) when is_atom(action) do
  {:ok, apply_changes(changeset)}
end
# Invalid changeset: record the attempted action and return an error.
def apply_action(%Changeset{} = changeset, action) when is_atom(action) do
  {:error, %{changeset | action: action}}
end
def apply_action(%Changeset{}, action) do
  raise ArgumentError, "expected action to be an atom, got: #{inspect action}"
end
@doc """
Applies the changeset action if the changes are valid or raises an error.
## Examples
iex> changeset = change(%Post{author: "bar"}, %{title: "foo"})
iex> apply_action!(changeset, :update)
%Post{author: "bar", title: "foo"}
iex> changeset = change(%Post{author: "bar"}, %{title: :bad})
iex> apply_action!(changeset, :update)
** (Ecto.InvalidChangesetError) could not perform update because changeset is invalid.
See `apply_action/2` for more information.
"""
@spec apply_action!(t, atom) :: Ecto.Schema.t() | data
def apply_action!(%Changeset{} = changeset, action) do
  # Raising variant of apply_action/2.
  case apply_action(changeset, action) do
    {:ok, data} -> data
    {:error, invalid} -> raise Ecto.InvalidChangesetError, action: action, changeset: invalid
  end
end
## Validations
@doc ~S"""
Returns a keyword list of the validations for this changeset.
The keys in the list are the names of fields, and the values are a
validation associated with the field. A field may occur multiple
times in the list.
## Example
%Post{}
|> change()
|> validate_format(:title, ~r/^\w+:\s/, message: "must start with a topic")
|> validate_length(:title, max: 100)
|> validations()
#=> [
title: {:length, [ max: 100 ]},
title: {:format, ~r/^\w+:\s/}
]
The following validations may be included in the result. The list is
not necessarily exhaustive. For example, custom validations written
by the developer will also appear in our return value.
This first group contains validations that hold a keyword list of validators.
This list may also include a `:message` key.
* `{:length, [option]}`
* `min: n`
* `max: n`
* `is: n`
* `count: :graphemes | :codepoints`
* `{:number, [option]}`
* `equal_to: n`
* `greater_than: n`
* `greater_than_or_equal_to: n`
* `less_than: n`
* `less_than_or_equal_to: n`
The other validators simply take a value:
* `{:exclusion, Enum.t}`
* `{:format, ~r/pattern/}`
* `{:inclusion, Enum.t}`
* `{:subset, Enum.t}`
Note that calling `validate_required/3` does not store the validation under the
`changeset.validations` key (and so won't be included in the result of this
function). The required fields are stored under the `changeset.required` key.
"""
@spec validations(t) :: [{atom, term}]
# Simple accessor over the accumulated validations metadata.
def validations(%Changeset{} = changeset), do: changeset.validations
@doc """
Adds an error to the changeset.
An additional keyword list `keys` can be passed to provide additional
contextual information for the error. This is useful when using
`traverse_errors/2` and when translating errors with `Gettext`
## Examples
iex> changeset = change(%Post{}, %{title: ""})
iex> changeset = add_error(changeset, :title, "empty")
iex> changeset.errors
[title: {"empty", []}]
iex> changeset.valid?
false
iex> changeset = change(%Post{}, %{title: ""})
iex> changeset = add_error(changeset, :title, "empty", additional: "info")
iex> changeset.errors
[title: {"empty", [additional: "info"]}]
iex> changeset.valid?
false
iex> changeset = change(%Post{}, %{tags: ["ecto", "elixir", "x"]})
iex> changeset = add_error(changeset, :tags, "tag '%{val}' is too short", val: "x")
iex> changeset.errors
[tags: {"tag '%{val}' is too short", [val: "x"]}]
iex> changeset.valid?
false
"""
@spec add_error(t, atom, String.t, Keyword.t) :: t
def add_error(%Changeset{errors: errors} = changeset, key, message, keys \\ []) when is_binary(message) do
  # Prepend the new error and mark the changeset invalid.
  error = {key, {message, keys}}
  %{changeset | errors: [error | errors], valid?: false}
end
@doc """
Validates the given `field` change.
It invokes the `validator` function to perform the validation
only if a change for the given `field` exists and the change
value is not `nil`. The function must return a list of errors
(with an empty list meaning no errors).
In case there's at least one error, the list of errors will be appended to the
`:errors` field of the changeset and the `:valid?` flag will be set to
`false`.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = validate_change changeset, :title, fn :title, title ->
...> # Value must not be "foo"!
...> if title == "foo" do
...> [title: "cannot be foo"]
...> else
...> []
...> end
...> end
iex> changeset.errors
[title: {"cannot be foo", []}]
"""
@spec validate_change(t, atom, (atom, term -> [{atom, String.t} | {atom, {String.t, Keyword.t}}])) :: t
def validate_change(%Changeset{} = changeset, field, validator) when is_atom(field) do
  %{changes: changes, errors: errors} = changeset
  # Raises if `field` is not a known field of this changeset.
  ensure_field_exists!(changeset, field)
  value = Map.get(changes, field)
  # The validator is skipped entirely when the change is missing or nil.
  new = if is_nil(value), do: [], else: validator.(field, value)
  # Normalize returned errors into the internal {message, metadata} shape;
  # any other shape is a programming error and raises FunctionClauseError.
  new =
    Enum.map(new, fn
      {key, val} when is_atom(key) and is_binary(val) ->
        {key, {val, []}}
      {key, {val, opts}} when is_atom(key) and is_binary(val) and is_list(opts) ->
        {key, {val, opts}}
    end)
  case new do
    [] -> changeset
    [_|_] -> %{changeset | errors: new ++ errors, valid?: false}
  end
end
@doc """
Stores the validation `metadata` and validates the given `field` change.
Similar to `validate_change/3` but stores the validation metadata
into the changeset validators. The validator metadata is often used
as a reflection mechanism, to automatically generate code based on
the available validations.
## Examples
iex> changeset = change(%Post{}, %{title: "foo"})
iex> changeset = validate_change changeset, :title, :useless_validator, fn
...> _, _ -> []
...> end
iex> changeset.validations
[title: :useless_validator]
"""
@spec validate_change(t, atom, term, (atom, term -> [{atom, String.t} | {atom, {String.t, Keyword.t}}])) :: t
def validate_change(%Changeset{} = changeset, field, metadata, validator) do
  # Record the validation metadata for reflection, then run the validator.
  validations = [{field, metadata} | changeset.validations]
  validate_change(%{changeset | validations: validations}, field, validator)
end
@doc """
Validates that one or more fields are present in the changeset.
You can pass a single field name or a list of field names that
are required.
If the value of a field is `nil` or a string made only of whitespace,
the changeset is marked as invalid, the field is removed from the
changeset's changes, and an error is added. An error won't be added if
the field already has an error.
If a field is given to `validate_required/3` but it has not been passed
as parameter during `cast/3` (i.e. it has not been changed), then
`validate_required/3` will check for its current value in the data.
If the data contains a non-empty value for the field, then no error is
added. This allows developers to use `validate_required/3` to perform
partial updates. For example, on `insert` all fields would be required,
because their default values on the data are all `nil`, but on `update`,
if you don't want to change a field that has been previously set,
you are not required to pass it as a parameter, since `validate_required/3`
won't add an error for missing changes as long as the value in the
data given to the `changeset` is not empty.
Do not use this function to validate associations are required,
instead pass the `:required` option to `cast_assoc/3`.
Opposite to other validations, calling this function does not store
the validation under the `changeset.validations` key. Instead, it
stores all required fields under `changeset.required`.
## Options
* `:message` - the message on failure, defaults to "can't be blank"
* `:trim` - a boolean that sets whether whitespaces are removed before
running the validation on binaries/strings, defaults to true
## Examples
validate_required(changeset, :title)
validate_required(changeset, [:title, :body])
"""
@spec validate_required(t, list | atom, Keyword.t) :: t
def validate_required(%Changeset{} = changeset, fields, opts \\ []) when not is_nil(fields) do
  %{required: required, errors: errors, changes: changes} = changeset
  # By default whitespace-only strings count as blank (handled by missing?/3).
  trim = Keyword.get(opts, :trim, true)
  fields = List.wrap(fields)
  # A field produces an error only when it is missing, exists on the
  # changeset (ensure_field_exists! raises otherwise), and does not
  # already carry an error.
  fields_with_errors =
    for field <- fields,
        missing?(changeset, field, trim),
        ensure_field_exists!(changeset, field),
        is_nil(errors[field]),
        do: field
  case fields_with_errors do
    [] ->
      %{changeset | required: fields ++ required}
    _ ->
      message = message(opts, "can't be blank")
      new_errors = Enum.map(fields_with_errors, &{&1, {message, [validation: :required]}})
      # Blank values are removed from changes so they are never applied.
      changes = Map.drop(changes, fields_with_errors)
      %{changeset | changes: changes, required: fields ++ required, errors: new_errors ++ errors, valid?: false}
  end
end
@doc """
Validates that no existing record with a different primary key
has the same values for these fields.
This function exists to provide quick feedback to users of your
application. It should not be relied on for any data guarantee as it
has race conditions and is inherently unsafe. For example, if this
check happens twice in the same time interval (because the user
submitted a form twice), both checks may pass and you may end-up with
duplicate entries in the database. Therefore, a `unique_constraint/3`
should also be used to ensure your data won't get corrupted.
However, because constraints are only checked if all validations
succeed, this function can be used as an early check to provide
early feedback to users, since most conflicting data will have been
inserted prior to the current validation phase.
## Options
* `:message` - the message in case the constraint check fails,
defaults to "has already been taken".
* `:match` - how the changeset constraint name is matched against the
repo constraint, may be `:exact` or `:suffix`. Defaults to `:exact`.
`:suffix` matches any repo constraint which `ends_with?` `:name`
to this changeset constraint.
* `:error_key` - the key to which changeset error will be added when
check fails, defaults to the first field name of the given list of
fields.
* `:prefix` - the prefix to run the query on (such as the schema path
in Postgres or the database in MySQL). See `Ecto.Repo` documentation
for more information.
* `:repo_opts` - the options to pass to the `Ecto.Repo` call.
* `:query` - the base query to use for the check. Defaults to the schema of
the changeset. If the primary key is set, a clause will be added to exclude
the changeset row itself from the check.
## Examples
unsafe_validate_unique(changeset, :city_name, repo)
unsafe_validate_unique(changeset, [:city_name, :state_name], repo)
unsafe_validate_unique(changeset, [:city_name, :state_name], repo, message: "city must be unique within state")
unsafe_validate_unique(changeset, [:city_name, :state_name], repo, prefix: "public")
unsafe_validate_unique(changeset, [:city_name, :state_name], repo, query: from(c in City, where: is_nil(c.deleted_at)))
"""
@spec unsafe_validate_unique(t, atom | [atom, ...], Ecto.Repo.t, Keyword.t) :: t
def unsafe_validate_unique(changeset, fields, repo, opts \\ []) when is_list(opts) do
  fields = List.wrap(fields)
  {repo_opts, opts} = Keyword.pop(opts, :repo_opts, [])
  {validations, schema} =
    case changeset do
      %Ecto.Changeset{validations: validations, data: %schema{}} ->
        {validations, schema}
      %Ecto.Changeset{} ->
        raise ArgumentError, "unsafe_validate_unique/4 does not work with schemaless changesets"
    end
  # Record the validation for reflection via validations/1.
  changeset = %{changeset | validations: [{:unsafe_unique, fields} | validations]}
  where_clause = for field <- fields do
    {field, get_field(changeset, field)}
  end
  # No need to query if there is a prior error for the fields
  any_prior_errors_for_fields? = Enum.any?(changeset.errors, &(elem(&1, 0) in fields))
  # No need to query if we haven't changed any of the fields in question.
  # (Fixed: the original text contained `¬`, a mojibake of `&not` produced
  # by HTML-entity decoding, which is not valid Elixir.)
  unrelated_changes? = Enum.all?(fields, &(not Map.has_key?(changeset.changes, &1)))
  # If we don't have values for all fields, we can't query for uniqueness
  any_nil_values_for_fields? = Enum.any?(where_clause, &(&1 |> elem(1) |> is_nil()))
  if unrelated_changes? || any_nil_values_for_fields? || any_prior_errors_for_fields? do
    changeset
  else
    query =
      Keyword.get(opts, :query, schema)
      |> maybe_exclude_itself(schema, changeset)
      |> Ecto.Query.where(^where_clause)
      |> Ecto.Query.select(true)
      |> Ecto.Query.limit(1)
    query =
      if prefix = opts[:prefix] do
        Ecto.Query.put_query_prefix(query, prefix)
      else
        query
      end
    if repo.one(query, repo_opts) do
      error_key = Keyword.get(opts, :error_key, hd(fields))
      add_error(changeset, error_key, message(opts, "has already been taken"),
        validation: :unsafe_unique, fields: fields)
    else
      changeset
    end
  end
end
# Excludes the changeset's own row from the uniqueness query when the
# changeset carries a complete (all non-nil) primary key, so that updating a
# row does not report a conflict with itself. Returns `base_query` untouched
# when the schema has no primary key or any primary-key value is nil.
defp maybe_exclude_itself(base_query, schema, changeset) do
  :primary_key
  |> schema.__schema__()
  |> Enum.map(&{&1, get_field(changeset, &1)})
  |> case do
    [{_pk_field, nil} | _remaining_pks] ->
      # First primary-key value is nil (e.g. an insert): nothing to exclude.
      base_query

    [{pk_field, value} | remaining_pks] ->
      # generate a clean query (one that does not start with 'TRUE OR ...')
      first_expr = Ecto.Query.dynamic([q], field(q, ^pk_field) == ^value)

      # AND the remaining primary-key fields into the dynamic expression.
      # A nil value anywhere aborts the exclusion entirely (halts with nil).
      Enum.reduce_while(remaining_pks, first_expr, fn
        {_pk_field, nil}, _expr ->
          {:halt, nil}

        {pk_field, value}, expr ->
          {:cont, Ecto.Query.dynamic([q], ^expr and field(q, ^pk_field) == ^value)}
      end)
      |> case do
        nil ->
          base_query

        matches_pk ->
          # Keep every row that does NOT match this changeset's primary key.
          Ecto.Query.where(base_query, ^Ecto.Query.dynamic(not (^matches_pk)))
      end

    [] ->
      # Schema defines no primary key: nothing to exclude.
      base_query
  end
end
# Asserts that `field` is a known field of the changeset's types map.
# Returns `true` when present; raises `ArgumentError` otherwise.
defp ensure_field_exists!(%Changeset{types: types, data: data}, field) do
  if Map.has_key?(types, field) do
    true
  else
    raise ArgumentError, "unknown field #{inspect(field)} in #{inspect(data)}"
  end
end
# Decides whether the current value of `field` counts as missing for
# validate_required/3: nil, or an (optionally left-trimmed) empty string.
# Raises for not-loaded associations and for non-atom field names.
defp missing?(changeset, field, trim) when is_atom(field) do
  case get_field(changeset, field) do
    nil ->
      true

    %{__struct__: Ecto.Association.NotLoaded} ->
      raise ArgumentError, "attempting to validate association `#{field}` " <>
        "that was not loaded. Please preload your associations " <>
        "before calling validate_required/3 or pass the :required " <>
        "option to Ecto.Changeset.cast_assoc/3"

    string when is_binary(string) and trim ->
      String.trim_leading(string) == ""

    string when is_binary(string) ->
      string == ""

    _present ->
      false
  end
end

defp missing?(_changeset, field, _trim) do
  raise ArgumentError, "validate_required/3 expects field names to be atoms, got: `#{inspect field}`"
end
@doc """
Validates a change has the given format.

The format has to be expressed as a regular expression.

## Options

  * `:message` - the message on failure, defaults to "has invalid format"

## Examples

    validate_format(changeset, :email, ~r/@/)

"""
@spec validate_format(t, atom, Regex.t, Keyword.t) :: t
def validate_format(changeset, field, format, opts \\ []) do
  validate_change(changeset, field, {:format, format}, fn _, value ->
    case value =~ format do
      true -> []
      false -> [{field, {message(opts, "has invalid format"), [validation: :format]}}]
    end
  end)
end
@doc """
Validates a change is included in the given enumerable.

## Options

  * `:message` - the message on failure, defaults to "is invalid"

## Examples

    validate_inclusion(changeset, :cardinal_direction, ["north", "east", "south", "west"])
    validate_inclusion(changeset, :age, 0..99)

"""
@spec validate_inclusion(t, atom, Enum.t, Keyword.t) :: t
def validate_inclusion(changeset, field, data, opts \\ []) do
  validate_change(changeset, field, {:inclusion, data}, fn _, value ->
    if Enum.member?(data, value) do
      []
    else
      [{field, {message(opts, "is invalid"), [validation: :inclusion, enum: data]}}]
    end
  end)
end
@doc ~S"""
Validates a change, of type enum, is a subset of the given enumerable.

This validates if a list of values belongs to the given enumerable.
If you need to validate if a single value is inside the given enumerable,
you should use `validate_inclusion/4` instead.

## Options

  * `:message` - the message on failure, defaults to "has an invalid entry"

## Examples

    validate_subset(changeset, :pets, ["cat", "dog", "parrot"])
    validate_subset(changeset, :lottery_numbers, 0..99)

"""
@spec validate_subset(t, atom, Enum.t, Keyword.t) :: t
def validate_subset(changeset, field, data, opts \\ []) do
  validate_change(changeset, field, {:subset, data}, fn _, value ->
    # All entries must belong to `data`; otherwise report a single error.
    if Enum.all?(value, &(&1 in data)) do
      []
    else
      [{field, {message(opts, "has an invalid entry"), [validation: :subset, enum: data]}}]
    end
  end)
end
@doc """
Validates a change is not included in the given enumerable.

## Options

  * `:message` - the message on failure, defaults to "is reserved"

## Examples

    validate_exclusion(changeset, :name, ~w(admin superadmin))

"""
@spec validate_exclusion(t, atom, Enum.t, Keyword.t) :: t
def validate_exclusion(changeset, field, data, opts \\ []) do
  validate_change(changeset, field, {:exclusion, data}, fn _, value ->
    case Enum.member?(data, value) do
      true -> [{field, {message(opts, "is reserved"), [validation: :exclusion, enum: data]}}]
      false -> []
    end
  end)
end
@doc """
Validates a change is a string or list of the given length.

Note that the length of a string is counted in graphemes by default. If using
this validation to match a character limit of a database backend,
it's likely that the limit ignores graphemes and limits the number
of unicode characters. Then consider using the `:count` option to
limit the number of codepoints (`:codepoints`), or limit the number of bytes (`:bytes`).

## Options

  * `:is` - the length must be exactly this value
  * `:min` - the length must be greater than or equal to this value
  * `:max` - the length must be less than or equal to this value
  * `:count` - what length to count for string, `:graphemes` (default), `:codepoints` or `:bytes`
  * `:message` - the message on failure, depending on the validation, is one of:
    * for strings:
      * "should be %{count} character(s)"
      * "should be at least %{count} character(s)"
      * "should be at most %{count} character(s)"
    * for binary:
      * "should be %{count} byte(s)"
      * "should be at least %{count} byte(s)"
      * "should be at most %{count} byte(s)"
    * for lists:
      * "should have %{count} item(s)"
      * "should have at least %{count} item(s)"
      * "should have at most %{count} item(s)"

## Examples

    validate_length(changeset, :title, min: 3)
    validate_length(changeset, :title, max: 100)
    validate_length(changeset, :title, min: 3, max: 100)
    validate_length(changeset, :code, is: 9)
    validate_length(changeset, :topics, is: 2)
    validate_length(changeset, :icon, count: :bytes, max: 1024 * 16)

"""
@spec validate_length(t, atom, Keyword.t) :: t
def validate_length(changeset, field, opts) when is_list(opts) do
  validate_change changeset, field, {:length, opts}, fn
    _, value ->
      count_type = opts[:count] || :graphemes

      # Pick how the value is measured and which message family (:string,
      # :binary or :list) the resulting error should use.
      {type, length} = case {value, count_type} do
        {value, :codepoints} when is_binary(value) ->
          {:string, codepoints_length(value, 0)}

        {value, :graphemes} when is_binary(value) ->
          {:string, String.length(value)}

        {value, :bytes} when is_binary(value) ->
          {:binary, byte_size(value)}

        {value, _} when is_list(value) ->
          {:list, list_length(changeset, field, value)}
      end

      # Each helper returns nil when its bound is satisfied (or the option is
      # absent), so `||` yields the first violated bound, checked in the
      # order :is, :min, :max.
      error = ((is = opts[:is]) && wrong_length(type, length, is, opts)) ||
              ((min = opts[:min]) && too_short(type, length, min, opts)) ||
              ((max = opts[:max]) && too_long(type, length, max, opts))

      if error, do: [{field, error}], else: []
  end
end
# Counts Unicode codepoints in a binary. Any byte that does not start a
# valid UTF-8 sequence is counted as a single codepoint (second clause).
defp codepoints_length(binary, acc) do
  case binary do
    <<_::utf8, rest::binary>> -> codepoints_length(rest, acc + 1)
    <<_, rest::binary>> -> codepoints_length(rest, acc + 1)
    <<>> -> acc
  end
end
# Length for list values: embed/assoc fields are measured after filtering
# empty relation entries via Relation.filter_empty/1; plain lists are
# counted as-is.
defp list_length(%{types: types}, field, value) do
  with {:ok, {tag, _association}} when tag in [:embed, :assoc] <- Map.fetch(types, field) do
    length(Relation.filter_empty(value))
  else
    _ -> length(value)
  end
end
# `:is` bound — nil when the measured length equals the target exactly
# (first clause matches via the repeated binding), otherwise an error tuple
# whose message depends on the value kind.
defp wrong_length(_type, target, target, _opts), do: nil

defp wrong_length(:string, _length, target, opts) do
  {message(opts, "should be %{count} character(s)"),
   count: target, validation: :length, kind: :is, type: :string}
end

defp wrong_length(:binary, _length, target, opts) do
  {message(opts, "should be %{count} byte(s)"),
   count: target, validation: :length, kind: :is, type: :binary}
end

defp wrong_length(:list, _length, target, opts) do
  {message(opts, "should have %{count} item(s)"),
   count: target, validation: :length, kind: :is, type: :list}
end
# `:min` bound — nil when the measured length meets the minimum, otherwise
# an error tuple whose message depends on the value kind.
defp too_short(_type, length, target, _opts) when length >= target, do: nil

defp too_short(:string, _length, target, opts) do
  {message(opts, "should be at least %{count} character(s)"),
   count: target, validation: :length, kind: :min, type: :string}
end

defp too_short(:binary, _length, target, opts) do
  {message(opts, "should be at least %{count} byte(s)"),
   count: target, validation: :length, kind: :min, type: :binary}
end

defp too_short(:list, _length, target, opts) do
  {message(opts, "should have at least %{count} item(s)"),
   count: target, validation: :length, kind: :min, type: :list}
end
# `:max` bound — nil when the measured length is within the maximum,
# otherwise an error tuple whose message depends on the value kind.
defp too_long(_type, length, target, _opts) when length <= target, do: nil

defp too_long(:string, _length, target, opts) do
  {message(opts, "should be at most %{count} character(s)"),
   count: target, validation: :length, kind: :max, type: :string}
end

defp too_long(:binary, _length, target, opts) do
  {message(opts, "should be at most %{count} byte(s)"),
   count: target, validation: :length, kind: :max, type: :binary}
end

defp too_long(:list, _length, target, opts) do
  {message(opts, "should have at most %{count} item(s)"),
   count: target, validation: :length, kind: :max, type: :list}
end
@doc """
Validates the properties of a number.

## Options

  * `:less_than`
  * `:greater_than`
  * `:less_than_or_equal_to`
  * `:greater_than_or_equal_to`
  * `:equal_to`
  * `:not_equal_to`
  * `:message` - the message on failure, defaults to one of:
    * "must be less than %{number}"
    * "must be greater than %{number}"
    * "must be less than or equal to %{number}"
    * "must be greater than or equal to %{number}"
    * "must be equal to %{number}"
    * "must be not equal to %{number}"

## Examples

    validate_number(changeset, :count, less_than: 3)
    validate_number(changeset, :pi, greater_than: 3, less_than: 4)
    validate_number(changeset, :the_answer_to_life_the_universe_and_everything, equal_to: 42)

"""
@spec validate_number(t, atom, Keyword.t) :: t
def validate_number(changeset, field, opts) do
  validate_change changeset, field, {:number, opts}, fn
    field, value ->
      # A custom :message (if any) applies to every remaining check.
      {message, opts} = Keyword.pop(opts, :message)

      # Stop at the first failing check; an empty result means valid.
      Enum.find_value opts, [], fn {spec_key, target_value} ->
        case Map.fetch(@number_validators, spec_key) do
          {:ok, {spec_function, default_message}} ->
            validate_number(field, value, message || default_message,
                            spec_key, spec_function, target_value)

          :error ->
            supported_options = @number_validators |> Map.keys() |> Enum.map_join("\n", &"  * #{inspect(&1)}")

            raise ArgumentError, """
            unknown option #{inspect spec_key} given to validate_number/3
            The supported options are:
            #{supported_options}
            """
        end
      end
  end
end
# Decimal values cannot use the plain operator functions, so they are
# compared with Decimal.compare/2 (normalized across Decimal 1.6/2.0
# result shapes) and matched against the requested spec.
defp validate_number(field, %Decimal{} = value, message, spec_key, _spec_function, target_value) do
  comparison =
    value
    |> Decimal.compare(decimal_new(target_value))
    |> normalize_compare()

  if decimal_compare(comparison, spec_key) do
    nil
  else
    [{field, {message, validation: :number, kind: spec_key, number: target_value}}]
  end
end

# Every other number is checked with the operator function taken from
# @number_validators (e.g. `&</2` for :less_than).
defp validate_number(field, value, message, spec_key, spec_function, target_value) do
  if spec_function.(value, target_value) do
    nil
  else
    [{field, {message, validation: :number, kind: spec_key, number: target_value}}]
  end
end
# TODO: Remove me once we support Decimal 2.0 only
# Support mismatch between API for Decimal.compare/2 for versions 1.6 and 2.0:
# Decimal 1.6 returns a %Decimal{} (-1/0/1); 2.0 already returns :lt/:eq/:gt,
# which falls through the final clause unchanged.
defp normalize_compare(%Decimal{coef: 1, sign: -1}), do: :lt
defp normalize_compare(%Decimal{coef: 0}), do: :eq
defp normalize_compare(%Decimal{coef: 1, sign: 1}), do: :gt
defp normalize_compare(result), do: result
# Builds a Decimal from the comparison target; floats take the dedicated
# Decimal.from_float/1 path, everything else goes through Decimal.new/1.
defp decimal_new(term) do
  if is_float(term) do
    Decimal.from_float(term)
  else
    Decimal.new(term)
  end
end
# Maps a normalized comparison result (:lt/:gt/:eq) to whether it satisfies
# the requested validation spec.
defp decimal_compare(result, spec) do
  case result do
    :lt -> spec in [:less_than, :less_than_or_equal_to, :not_equal_to]
    :gt -> spec in [:greater_than, :greater_than_or_equal_to, :not_equal_to]
    :eq -> spec in [:equal_to, :less_than_or_equal_to, :greater_than_or_equal_to]
  end
end
@doc """
Validates that the given parameter matches its confirmation.

By calling `validate_confirmation(changeset, :email)`, this
validation will check if both "email" and "email_confirmation"
in the parameter map matches. Note this validation only looks
at the parameters themselves, never the fields in the schema.
As such as, the "email_confirmation" field does not need to be
added as a virtual field in your schema.

Note that if the confirmation field is nil or missing, this does
not add a validation error. You can specify that the confirmation
parameter is required in the options (see below).

## Options

  * `:message` - the message on failure, defaults to "does not match confirmation"
  * `:required` - boolean, sets whether existence of confirmation parameter
    is required for addition of error. Defaults to false

## Examples

    validate_confirmation(changeset, :email)
    validate_confirmation(changeset, :password, message: "does not match password")

    cast(data, params, [:password])
    |> validate_confirmation(:password, message: "does not match password")

"""
@spec validate_confirmation(t, atom, Keyword.t) :: t
def validate_confirmation(changeset, field, opts \\ [])

def validate_confirmation(%{params: params} = changeset, field, opts) when is_map(params) do
  # Confirmation is read from the raw params, so keys are strings.
  param = Atom.to_string(field)
  error_param = "#{param}_confirmation"
  error_field = String.to_atom(error_param)
  value = Map.get(params, param)

  errors =
    case Map.fetch(params, error_param) do
      {:ok, ^value} ->
        # Confirmation present and equal to the original value: no error.
        []

      {:ok, _} ->
        # Confirmation present but different: mismatch error on the
        # "<field>_confirmation" key.
        [{error_field,
          {message(opts, "does not match confirmation"), [validation: :confirmation]}}]

      :error ->
        # Confirmation missing: only an error when :required was given.
        confirmation_missing(opts, error_field)
    end

  %{changeset | validations: [{field, {:confirmation, opts}} | changeset.validations],
                errors: errors ++ changeset.errors,
                valid?: changeset.valid? and errors == []}
end

# Changesets built without params (e.g. from change/2) have nothing to check.
def validate_confirmation(%{params: nil} = changeset, _, _) do
  changeset
end
# When the confirmation parameter is absent, it only becomes an error if the
# :required option was set; otherwise the absence is ignored.
defp confirmation_missing(opts, error_field) do
  if Keyword.get(opts, :required, false) do
    [{error_field, {message(opts, "can't be blank"), [validation: :required]}}]
  else
    []
  end
end
defp message(opts, key \\ :message, default) do
Keyword.get(opts, key, default)
end
@doc """
Validates the given parameter is true.

Note this validation only checks the parameter itself is true, never
the field in the schema. That's because acceptance parameters do not need
to be persisted, as by definition they would always be stored as `true`.

## Options

  * `:message` - the message on failure, defaults to "must be accepted"

## Examples

    validate_acceptance(changeset, :terms_of_service)
    validate_acceptance(changeset, :rules, message: "please accept rules")

"""
@spec validate_acceptance(t, atom, Keyword.t) :: t
def validate_acceptance(changeset, field, opts \\ [])

def validate_acceptance(%{params: params} = changeset, field, opts) do
  new_errors = validate_acceptance_errors(params, field, opts)

  # Record the validation, merge any new errors and update validity.
  %{changeset |
    validations: [{field, {:acceptance, opts}} | changeset.validations],
    errors: new_errors ++ changeset.errors,
    valid?: changeset.valid? and new_errors == []}
end
# Changesets built without params never produce acceptance errors.
defp validate_acceptance_errors(nil, _field, _opts), do: []

defp validate_acceptance_errors(params, field, opts) do
  # Params are keyed by strings; the value must cast to boolean true.
  value = Map.get(params, Atom.to_string(field))

  if Ecto.Type.cast(:boolean, value) == {:ok, true} do
    []
  else
    [{field, {message(opts, "must be accepted"), validation: :acceptance}}]
  end
end
## Optimistic lock
@doc ~S"""
Applies optimistic locking to the changeset.

[Optimistic
locking](http://en.wikipedia.org/wiki/Optimistic_concurrency_control) (or
*optimistic concurrency control*) is a technique that allows concurrent edits
on a single record. While pessimistic locking works by locking a resource for
an entire transaction, optimistic locking only checks if the resource changed
before updating it.

This is done by regularly fetching the record from the database, then checking
whether another user has made changes to the record *only when updating the
record*. This behaviour is ideal in situations where the chances of concurrent
updates to the same record are low; if they're not, pessimistic locking or
other concurrency patterns may be more suited.

## Usage

Optimistic locking works by keeping a "version" counter for each record; this
counter gets incremented each time a modification is made to a record. Hence,
in order to use optimistic locking, a field must exist in your schema for
versioning purpose. Such field is usually an integer but other types are
supported.

## Examples

Assuming we have a `Post` schema (stored in the `posts` table), the first step
is to add a version column to the `posts` table:

    alter table(:posts) do
      add :lock_version, :integer, default: 1
    end

The column name is arbitrary and doesn't need to be `:lock_version`. Now add
a field to the schema too:

    defmodule Post do
      use Ecto.Schema

      schema "posts" do
        field :title, :string
        field :lock_version, :integer, default: 1
      end

      def changeset(:update, struct, params \\ %{}) do
        struct
        |> Ecto.Changeset.cast(params, [:title])
        |> Ecto.Changeset.optimistic_lock(:lock_version)
      end
    end

Now let's take optimistic locking for a spin:

    iex> post = Repo.insert!(%Post{title: "foo"})
    %Post{id: 1, title: "foo", lock_version: 1}
    iex> valid_change = Post.changeset(:update, post, %{title: "bar"})
    iex> stale_change = Post.changeset(:update, post, %{title: "baz"})
    iex> Repo.update!(valid_change)
    %Post{id: 1, title: "bar", lock_version: 2}
    iex> Repo.update!(stale_change)
    ** (Ecto.StaleEntryError) attempted to update a stale entry:

    %Post{id: 1, title: "baz", lock_version: 1}

When a conflict happens (a record which has been previously fetched is
being updated, but that same record has been modified since it was
fetched), an `Ecto.StaleEntryError` exception is raised.

Optimistic locking also works with delete operations. Just call the
`optimistic_lock/3` function with the data before delete:

    iex> changeset = Ecto.Changeset.optimistic_lock(post, :lock_version)
    iex> Repo.delete(changeset)

`optimistic_lock/3` by default assumes the field
being used as a lock is an integer. If you want to use another type,
you need to pass the third argument customizing how the next value
is generated:

    iex> Ecto.Changeset.optimistic_lock(post, :lock_uuid, fn _ -> Ecto.UUID.generate end)

"""
@spec optimistic_lock(Ecto.Schema.t | t, atom, (term -> term)) :: t
def optimistic_lock(data_or_changeset, field, incrementer \\ &increment_with_rollover/1) do
  # Normalize input: accepts either a schema struct or a changeset.
  changeset = change(data_or_changeset, %{})
  current = get_field(changeset, field)

  # Apply these changes only inside the repo because we
  # don't want to permanently track the lock change.
  changeset = prepare_changes(changeset, fn changeset ->
    put_in(changeset.changes[field], incrementer.(current))
  end)

  # The filter restricts the repo operation to rows still holding the
  # current lock value; a mismatch surfaces as Ecto.StaleEntryError
  # (see the doc above).
  changeset = put_in(changeset.filters[field], current)
  changeset
end
# increment_with_rollover expect to be used with lock_version set as :integer in db schema
# 2_147_483_647 is upper limit for signed integer for both PostgreSQL and MySQL,
# so the counter wraps back to 1 instead of overflowing the column.
defp increment_with_rollover(version) when version >= 2_147_483_647, do: 1
defp increment_with_rollover(version) when is_integer(version), do: version + 1
@doc """
Provides a function executed by the repository on insert/update/delete.

If the changeset given to the repository is valid, the function given to
`prepare_changes/2` will be called with the changeset and must return a
changeset, allowing developers to do final adjustments to the changeset or
to issue data consistency commands. The repository itself can be accessed
inside the function under the `repo` field in the changeset. If the
changeset given to the repository is invalid, the function will not be
invoked.

The given function is guaranteed to run inside the same transaction
as the changeset operation for databases that do support transactions.

## Example

A common use case is updating a counter cache, in this case updating a post's
comment count when a comment is created:

    def create_comment(comment, params) do
      comment
      |> cast(params, [:body, :post_id])
      |> prepare_changes(fn changeset ->
           if post_id = get_change(changeset, :post_id) do
             query = from Post, where: [id: ^post_id]
             changeset.repo.update_all(query, inc: [comment_count: 1])
           end
           changeset
         end)
    end

We retrieve the repo from the comment changeset itself and use
update_all to update the counter cache in one query. Finally, the original
changeset must be returned.
"""
@spec prepare_changes(t, (t -> t)) :: t
def prepare_changes(%Changeset{prepare: prepare} = changeset, function) when is_function(function, 1) do
  # Prepended, so functions registered first run last (the repo applies
  # the list it receives in order after reversing it elsewhere).
  %{changeset | prepare: [function | prepare]}
end
## Constraints
@doc """
Returns all constraints in a changeset.

A constraint is a map with the following fields:

  * `:type` - the type of the constraint that will be checked in the database,
    such as `:check`, `:unique`, etc
  * `:constraint` - the database constraint name as a string
  * `:match` - the type of match Ecto will perform on a violated constraint
    against the `:constraint` value. It is `:exact`, `:suffix` or `:prefix`
  * `:field` - the field a violated constraint will apply the error to
  * `:error_message` - the error message in case of violated constraints
  * `:error_type` - the type of error that identifies the error message

"""
@spec constraints(t) :: [constraint]
def constraints(%Changeset{} = changeset), do: changeset.constraints
@doc """
Checks for a check constraint in the given field.

The check constraint works by relying on the database to check
if the check constraint has been violated or not and, if so,
Ecto converts it into a changeset error.

In order to use the check constraint, the first step is
to define the check constraint in a migration:

    create constraint("users", :price_must_be_positive, check: "price > 0")

Now that a constraint exists, when modifying users, we could
annotate the changeset with a check constraint so Ecto knows
how to convert it into an error message:

    cast(user, params, [:price])
    |> check_constraint(:price, name: :price_must_be_positive)

Now, when invoking `Repo.insert/2` or `Repo.update/2`, if the
price is not positive, it will be converted into an error and
`{:error, changeset}` returned by the repository. Note that the error
will occur only after hitting the database so it will not be visible
until all other validations pass.

## Options

  * `:message` - the message in case the constraint check fails.
    Defaults to "is invalid"
  * `:name` - the name of the constraint. Required.
  * `:match` - how the changeset constraint name is matched against the
    repo constraint, may be `:exact`, `:suffix` or `:prefix`. Defaults to `:exact`.
    `:suffix` matches any repo constraint which `ends_with?` `:name`
    to this changeset constraint.
    `:prefix` matches any repo constraint which `starts_with?` `:name`
    to this changeset constraint.

"""
# Spec added for consistency with the other *_constraint functions
# (unique_constraint/3, foreign_key_constraint/3, assoc_constraint/3, ...),
# which all declare one.
@spec check_constraint(t, atom, Keyword.t) :: t
def check_constraint(changeset, field, opts \\ []) do
  # Unlike the other constraint helpers, the name cannot be inferred here.
  constraint = opts[:name] || raise ArgumentError, "must supply the name of the constraint"
  message = message(opts, "is invalid")
  match_type = Keyword.get(opts, :match, :exact)
  add_constraint(changeset, :check, to_string(constraint), match_type, field, message)
end
@doc """
Checks for a unique constraint in the given field or list of fields.

The unique constraint works by relying on the database to check
if the unique constraint has been violated or not and, if so,
Ecto converts it into a changeset error.

In order to use the uniqueness constraint, the first step is
to define the unique index in a migration:

    create unique_index(:users, [:email])

Now that a constraint exists, when modifying users, we could
annotate the changeset with a unique constraint so Ecto knows
how to convert it into an error message:

    cast(user, params, [:email])
    |> unique_constraint(:email)

Now, when invoking `Repo.insert/2` or `Repo.update/2`, if the
email already exists, it will be converted into an error and
`{:error, changeset}` returned by the repository. Note that the error
will occur only after hitting the database so it will not be visible
until all other validations pass.

## Options

  * `:message` - the message in case the constraint check fails,
    defaults to "has already been taken"
  * `:name` - the constraint name. By default, the constraint
    name is inferred from the table + field(s). May be required
    explicitly for complex cases
  * `:match` - how the changeset constraint name is matched against the
    repo constraint, may be `:exact` or `:suffix`. Defaults to `:exact`.
    `:suffix` matches any repo constraint which `ends_with?` `:name`
    to this changeset constraint.

## Complex constraints

Because the constraint logic is in the database, we can leverage
all the database functionality when defining them. For example,
let's suppose the e-mails are scoped by company id:

    # In migration
    create unique_index(:users, [:email, :company_id])

    # In the changeset function
    cast(user, params, [:email])
    |> unique_constraint([:email, :company_id])

The first field name, `:email` in this case, will be used as the error
key to the changeset errors keyword list. For example, the above
`unique_constraint/3` would generate something like:

    Repo.insert!(%User{email: "john@elixir.org", company_id: 1})
    changeset = User.changeset(%User{}, %{email: "john@elixir.org", company_id: 1})
    {:error, changeset} = Repo.insert(changeset)
    changeset.errors #=> [email: {"has already been taken", []}]

In complex cases, instead of relying on name inference, it may be best
to set the constraint name explicitly:

    # In the migration
    create unique_index(:users, [:email, :company_id], name: :users_email_company_id_index)

    # In the changeset function
    cast(user, params, [:email])
    |> unique_constraint(:email, name: :users_email_company_id_index)

### Partitioning

If your table is partitioned, then your unique index might look different
per partition, e.g. Postgres adds p<number> to the middle of your key, like:

    users_p0_email_key
    users_p1_email_key
    ...
    users_p99_email_key

In this case you can use the name and suffix options together to match on
these dynamic indexes, like:

    cast(user, params, [:email])
    |> unique_constraint(:email, name: :email_key, match: :suffix)

## Case sensitivity

Unfortunately, different databases provide different guarantees
when it comes to case-sensitiveness. For example, in MySQL, comparisons
are case-insensitive by default. In Postgres, users can define case
insensitive column by using the `:citext` type/extension. In your migration:

    execute "CREATE EXTENSION IF NOT EXISTS citext"

    create table(:users) do
      ...
      add :email, :citext
      ...
    end

If for some reason your database does not support case insensitive columns,
you can explicitly downcase values before inserting/updating them:

    cast(data, params, [:email])
    |> update_change(:email, &String.downcase/1)
    |> unique_constraint(:email)

"""
@spec unique_constraint(t, atom | [atom, ...], Keyword.t) :: t
def unique_constraint(changeset, field_or_fields, opts \\ [])

# Single field: normalize to the list form below.
def unique_constraint(changeset, field, opts) when is_atom(field) do
  unique_constraint(changeset, [field], opts)
end

def unique_constraint(changeset, [first_field | _] = fields, opts) do
  # Constraint name defaults to the "<source>_<fields...>_index" convention.
  constraint = opts[:name] || unique_index_name(changeset, fields)
  message = message(opts, "has already been taken")
  match_type = Keyword.get(opts, :match, :exact)
  # Only the first field carries the changeset error (see doc above).
  add_constraint(changeset, :unique, to_string(constraint), match_type, first_field, message)
end
# Infers the default unique index name: "<source>_<field1>_..._index",
# using each field's source (column) name rather than the schema field name.
defp unique_index_name(changeset, fields) do
  sources = for field <- fields, do: get_field_source(changeset, field)
  Enum.join([get_source(changeset) | sources] ++ ["index"], "_")
end
@doc """
Checks for foreign key constraint in the given field.

The foreign key constraint works by relying on the database to
check if the associated data exists or not. This is useful to
guarantee that a child will only be created if the parent exists
in the database too.

In order to use the foreign key constraint the first step is
to define the foreign key in a migration. This is often done
with references. For example, imagine you are creating a
comments table that belongs to posts. One would have:

    create table(:comments) do
      add :post_id, references(:posts)
    end

By default, Ecto will generate a foreign key constraint with
name "comments_post_id_fkey" (the name is configurable).

Now that a constraint exists, when creating comments, we could
annotate the changeset with foreign key constraint so Ecto knows
how to convert it into an error message:

    cast(comment, params, [:post_id])
    |> foreign_key_constraint(:post_id)

Now, when invoking `Repo.insert/2` or `Repo.update/2`, if the
associated post does not exist, it will be converted into an
error and `{:error, changeset}` returned by the repository.

## Options

  * `:message` - the message in case the constraint check fails,
    defaults to "does not exist"
  * `:name` - the constraint name. By default, the constraint
    name is inferred from the table + field. May be required
    explicitly for complex cases

"""
@spec foreign_key_constraint(t, atom, Keyword.t) :: t
def foreign_key_constraint(changeset, field, opts \\ []) do
  # Name defaults to the "<source>_<column>_fkey" convention used by
  # references/2 in migrations (see doc above).
  constraint = opts[:name] || "#{get_source(changeset)}_#{get_field_source(changeset, field)}_fkey"
  message = message(opts, "does not exist")
  add_constraint(changeset, :foreign_key, to_string(constraint), :exact, field, message, :foreign)
end
@doc """
Checks the associated field exists.

This is similar to `foreign_key_constraint/3` except that the
field is inferred from the association definition. This is useful
to guarantee that a child will only be created if the parent exists
in the database too. Therefore, it only applies to `belongs_to`
associations.

As the name says, a constraint is required in the database for
this function to work. Such constraint is often added as a
reference to the child table:

    create table(:comments) do
      add :post_id, references(:posts)
    end

Now, when inserting a comment, it is possible to forbid any
comment to be added if the associated post does not exist:

    comment
    |> Ecto.Changeset.cast(params, [:post_id])
    |> Ecto.Changeset.assoc_constraint(:post)
    |> Repo.insert

## Options

  * `:message` - the message in case the constraint check fails,
    defaults to "does not exist"
  * `:name` - the constraint name. By default, the constraint
    name is inferred from the table + association field.
    May be required explicitly for complex cases

"""
@spec assoc_constraint(t, atom, Keyword.t) :: t
def assoc_constraint(changeset, assoc, opts \\ []) do
  # Derive the fkey name from the association's owner key; only
  # belongs_to associations own the foreign key, hence the restriction.
  constraint = opts[:name] ||
    case get_assoc(changeset, assoc) do
      %Ecto.Association.BelongsTo{owner_key: owner_key} ->
        "#{get_source(changeset)}_#{owner_key}_fkey"

      other ->
        raise ArgumentError,
          "assoc_constraint can only be added to belongs to associations, got: #{inspect other}"
    end

  message = message(opts, "does not exist")
  add_constraint(changeset, :foreign_key, to_string(constraint), :exact, assoc, message, :assoc)
end
@doc """
Checks the associated field does not exist.
This is similar to `foreign_key_constraint/3` except that the
field is inferred from the association definition. This is useful
to guarantee that parent can only be deleted (or have its primary
key changed) if no child exists in the database. Therefore, it only
applies to `has_*` associations.
As the name says, a constraint is required in the database for
this function to work. Such constraint is often added as a
reference to the child table:
create table(:comments) do
add :post_id, references(:posts)
end
Now, when deleting the post, it is possible to forbid any post to
be deleted if they still have comments attached to it:
post
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:comments)
|> Repo.delete
## Options
* `:message` - the message in case the constraint check fails,
defaults to "is still associated with this entry" (for `has_one`)
and "are still associated with this entry" (for `has_many`)
* `:name` - the constraint name. By default, the constraint
name is inferred from the association table + association
field. May be required explicitly for complex cases
"""
@spec no_assoc_constraint(t, atom, Keyword.t) :: t
def no_assoc_constraint(changeset, assoc, opts \\ []) do
{constraint, message} =
case get_assoc(changeset, assoc) do
%Ecto.Association.Has{cardinality: cardinality,
related_key: related_key, related: related} ->
{opts[:name] || "#{related.__schema__(:source)}_#{related_key}_fkey",
message(opts, no_assoc_message(cardinality))}
other ->
raise ArgumentError,
"no_assoc_constraint can only be added to has one/many associations, got: #{inspect other}"
end
add_constraint(changeset, :foreign_key, to_string(constraint), :exact, assoc, message, :no_assoc)
end
@doc """
Checks for an exclusion constraint in the given field.
The exclusion constraint works by relying on the database to check
if the exclusion constraint has been violated or not and, if so,
Ecto converts it into a changeset error.
## Options
* `:message` - the message in case the constraint check fails,
defaults to "violates an exclusion constraint"
* `:name` - the constraint name. By default, the constraint
name is inferred from the table + field. May be required
explicitly for complex cases
* `:match` - how the changeset constraint name is matched against the
repo constraint, may be `:exact` or `:suffix`. Defaults to `:exact`.
`:suffix` matches any repo constraint which `ends_with?` `:name`
to this changeset constraint.
"""
def exclusion_constraint(changeset, field, opts \\ []) do
constraint = opts[:name] || "#{get_source(changeset)}_#{get_field_source(changeset, field)}_exclusion"
message = message(opts, "violates an exclusion constraint")
match_type = Keyword.get(opts, :match, :exact)
add_constraint(changeset, :exclusion, to_string(constraint), match_type, field, message, :exclusion)
end
  # Default error messages for `no_assoc_constraint/3`, chosen by the
  # cardinality of the `has_one`/`has_many` association.
  defp no_assoc_message(:one), do: "is still associated with this entry"
  defp no_assoc_message(:many), do: "are still associated with this entry"
  # Convenience clause: when no distinct error type is given, reuse the
  # constraint type as the error type stored on the constraint entry.
  defp add_constraint(changeset, type, constraint, match, field, message) do
    add_constraint(changeset, type, constraint, match, field, message, type)
  end
defp add_constraint(%Changeset{constraints: constraints} = changeset,
type, constraint, match, field, error_message, error_type)
when is_binary(constraint) and is_atom(field) and is_binary(error_message) do
unless match in @match_types do
raise ArgumentError, "invalid match type: #{inspect match}. Allowed match types: #{inspect @match_types}"
end
constraint = %{
constraint: constraint,
error_message: error_message,
error_type: error_type,
field: field,
match: match,
type: type
}
%{changeset | constraints: [constraint | constraints]}
end
defp get_source(%{data: %{__meta__: %{source: source}}}) when is_binary(source),
do: source
defp get_source(%{data: data}), do:
raise ArgumentError, "cannot add constraint to changeset because it does not have a source, got: #{inspect data}"
defp get_source(item), do:
raise ArgumentError, "cannot add constraint because a changeset was not supplied, got: #{inspect item}"
defp get_assoc(%{types: types}, assoc) do
case Map.fetch(types, assoc) do
{:ok, {:assoc, association}} ->
association
_ ->
raise_invalid_assoc(types, assoc)
end
end
defp raise_invalid_assoc(types, assoc) do
associations = for {_key, {:assoc, %{field: field}}} <- types, do: field
raise ArgumentError, "cannot add constraint to changeset because association `#{assoc}` does not exist. " <>
"Did you mean one of `#{Enum.join(associations, "`, `")}`?"
end
defp get_field_source(%{data: %{__struct__: schema}}, field) when is_atom(schema),
do: schema.__schema__(:field_source, field) || field
defp get_field_source(%{}, field),
do: field
@doc ~S"""
Traverses changeset errors and applies the given function to error messages.
This function is particularly useful when associations and embeds
are cast in the changeset as it will traverse all associations and
embeds and place all errors in a series of nested maps.
A changeset is supplied along with a function to apply to each
error message as the changeset is traversed. The error message
function receives an error tuple `{msg, opts}`, for example:
{"should be at least %{count} characters", [count: 3, validation: :length, min: 3]}
## Examples
iex> traverse_errors(changeset, fn {msg, opts} ->
...> Enum.reduce(opts, msg, fn {key, value}, acc ->
...> String.replace(acc, "%{#{key}}", to_string(value))
...> end)
...> end)
%{title: ["should be at least 3 characters"]}
Optionally function can accept three arguments: `changeset`, `field`
and error tuple `{msg, opts}`. It is useful whenever you want to extract
validations rules from `changeset.validations` to build detailed error
description.
"""
@spec traverse_errors(t, (error -> String.t) | (Changeset.t, atom, error -> String.t)) :: %{atom => [term]}
def traverse_errors(%Changeset{errors: errors, changes: changes, types: types} = changeset, msg_func)
when is_function(msg_func, 1) or is_function(msg_func, 3) do
errors
|> Enum.reverse()
|> merge_keyword_keys(msg_func, changeset)
|> merge_related_keys(changes, types, msg_func, &traverse_errors/2)
end
defp merge_keyword_keys(keyword_list, msg_func, _) when is_function(msg_func, 1) do
Enum.reduce(keyword_list, %{}, fn({key, val}, acc) ->
val = msg_func.(val)
Map.update(acc, key, [val], &[val|&1])
end)
end
defp merge_keyword_keys(keyword_list, msg_func, changeset) when is_function(msg_func, 3) do
Enum.reduce(keyword_list, %{}, fn({key, val}, acc) ->
val = msg_func.(changeset, key, val)
Map.update(acc, key, [val], &[val|&1])
end)
end
  # A changeset without type information cannot be traversed, since relations
  # (assocs/embeds) are discovered through the types map.
  defp merge_related_keys(_, _, nil, _, _) do
    raise ArgumentError, "changeset does not have types information"
  end

  # Walks all known types and, for every cast relation (assoc/embed) present
  # in the changes, recursively applies `traverse_function` to the nested
  # changeset(s), merging the resulting maps into `map` under the relation's
  # field. Fields whose nested traversal produced only empty maps are omitted.
  defp merge_related_keys(map, changes, types, msg_func, traverse_function) do
    Enum.reduce types, map, fn
      {field, {tag, %{cardinality: :many}}}, acc when tag in @relations ->
        if changesets = Map.get(changes, field) do
          # Traverse each nested changeset, tracking whether every one of
          # them produced an empty result.
          {child, all_empty?} =
            Enum.map_reduce(changesets, true, fn changeset, all_empty? ->
              child = traverse_function.(changeset, msg_func)
              {child, all_empty? and child == %{}}
            end)

          # Only report the field when at least one nested changeset
          # contributed something.
          case all_empty? do
            true -> acc
            false -> Map.put(acc, field, child)
          end
        else
          acc
        end

      {field, {tag, %{cardinality: :one}}}, acc when tag in @relations ->
        if changeset = Map.get(changes, field) do
          case traverse_function.(changeset, msg_func) do
            child when child == %{} -> acc
            child -> Map.put(acc, field, child)
          end
        else
          acc
        end

      {_, _}, acc ->
        acc
    end
  end
  # Applies the changes of a relation and stores the result under `key`.
  # For `belongs_to` relations it additionally copies the related key's value
  # from the applied struct into the owner's foreign key field (via
  # `relation.owner_key`), keeping e.g. `post_id` in sync with the applied
  # `post` change. Any non-matching case falls through and leaves `acc`
  # untouched beyond the `key` update.
  defp apply_relation_changes(acc, key, relation, value) do
    relation_changed = Relation.apply_changes(relation, value)
    acc = Map.put(acc, key, relation_changed)

    with %Ecto.Association.BelongsTo{related_key: related_key} <- relation,
         %{^related_key => id} <- relation_changed do
      Map.put(acc, relation.owner_key, id)
    else
      _ -> acc
    end
  end
@doc ~S"""
Traverses changeset validations and applies the given function to validations.
This behaves the same as `traverse_errors/2`, but operates on changeset
validations instead of errors.
## Examples
iex> traverse_validations(changeset, &(&1))
%{title: [format: ~r/pattern/, length: [min: 1, max: 20]]}
iex> traverse_validations(changeset, fn
...> {:length, opts} -> {:length, "#{Keyword.get(opts, :min, 0)}-#{Keyword.get(opts, :max, 32)}"}
...> {:format, %Regex{source: source}} -> {:format, "/#{source}/"}
...> {other, opts} -> {other, inspect(opts)}
...> end)
%{title: [format: "/pattern/", length: "1-20"]}
"""
@spec traverse_validations(t, (error -> String.t) | (Changeset.t, atom, error -> String.t)) :: %{atom => [term]}
def traverse_validations(%Changeset{validations: validations, changes: changes, types: types} = changeset, msg_func)
when is_function(msg_func, 1) or is_function(msg_func, 3) do
validations
|> Enum.reverse()
|> merge_keyword_keys(msg_func, changeset)
|> merge_related_keys(changes, types, msg_func, &traverse_validations/2)
end
end
defimpl Inspect, for: Ecto.Changeset do
  import Inspect.Algebra

  # Renders a curated, fixed-order subset of the changeset's fields, hiding
  # redacted schema fields and collapsing the data struct to its module name.
  def inspect(%Ecto.Changeset{data: data} = changeset, opts) do
    list = for attr <- [:action, :changes, :errors, :data, :valid?] do
      {attr, Map.get(changeset, attr)}
    end

    # Only schemas expose __schema__/1; plain maps/structs have no
    # redacted fields.
    redacted_fields =
      case data do
        %type{} ->
          if function_exported?(type, :__schema__, 1) do
            type.__schema__(:redact_fields)
          else
            []
          end

        _ ->
          []
      end

    container_doc("#Ecto.Changeset<", list, ">", opts, fn
      {:action, action}, opts -> concat("action: ", to_doc(action, opts))
      {:changes, changes}, opts -> concat("changes: ", changes |> filter(redacted_fields) |> to_doc(opts))
      {:data, data}, _opts -> concat("data: ", to_struct(data, opts))
      {:errors, errors}, opts -> concat("errors: ", to_doc(errors, opts))
      {:valid?, valid?}, opts -> concat("valid?: ", to_doc(valid?, opts))
    end)
  end

  # Avoid rendering the full data struct (potentially huge or sensitive);
  # show only "#Module<>".
  defp to_struct(%{__struct__: struct}, _opts), do: "#" <> Kernel.inspect(struct) <> "<>"
  defp to_struct(other, opts), do: to_doc(other, opts)

  # Masks the value of every redacted field that is present in the changes.
  # Map.replace/3 only touches existing keys, replacing the original
  # has_key?/put combination with a single stdlib call.
  defp filter(changes, redacted_fields) do
    Enum.reduce(redacted_fields, changes, fn redacted_field, changes ->
      Map.replace(changes, redacted_field, "**redacted**")
    end)
  end
end
| 38.415909 | 169 | 0.663618 |
ff630f4d62c5535137e72ed307b695ec9ac18a33 | 100,543 | exs | Elixir | test/validation/line_polygon_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 110 | 2016-05-05T21:09:19.000Z | 2022-03-08T05:22:16.000Z | test/validation/line_polygon_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 15 | 2016-12-01T00:32:11.000Z | 2022-01-18T13:56:37.000Z | test/validation/line_polygon_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 24 | 2016-09-19T20:06:50.000Z | 2021-06-16T06:41:10.000Z | defmodule Intersect.Validation.LinePolygonATest do
use ExUnit.Case
@tag :validation
test "05-001 - L/A-3-1: a line touching the closing point of a polygon" do
a = "LINESTRING (150 150, 40 230)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-001 - L/A-3-1: a line touching the closing point of a polygon (float)" do
a = "LINESTRING(150.0 150.0,40.0 230.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-002 - L/A-3-2: the start and end points of a LineString touching the boundary (at non-vertices) of a polygon" do
a = "LINESTRING (40 40, 50 130, 130 130)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-002 - L/A-3-2: the start and end points of a LineString touching the boundary (at non-vertices) of a polygon (float)" do
a = "LINESTRING(40.0 40.0,50.0 130.0,130.0 130.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-003 - L/A-3-3: the end point of a line touching the closing point of a polygon" do
a = "LINESTRING (40 230, 150 150)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-003 - L/A-3-3: the end point of a line touching the closing point of a polygon (float)" do
a = "LINESTRING(40.0 230.0,150.0 150.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-004 - L/A-3-4: an entire LineString touching the boundary (at non-vertices) of a polygon" do
a = "LINESTRING (210 150, 330 150)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-004 - L/A-3-4: an entire LineString touching the boundary (at non-vertices) of a polygon (float)" do
a = "LINESTRING(210.0 150.0,330.0 150.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-005 - L/A-3-5: the start portion of a LineString touching the boundary (at non-vertices) of a polygon" do
a = "LINESTRING (200 150, 310 150, 360 220)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-005 - L/A-3-5: the start portion of a LineString touching the boundary (at non-vertices) of a polygon (float)" do
a = "LINESTRING(200.0 150.0,310.0 150.0,360.0 220.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-006 - L/A-3-6: the start portion and the end point of a LineString touching the boundary of a polygon" do
a = "LINESTRING (180 150, 250 150, 230 250, 370 250, 410 150)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-006 - L/A-3-6: the start portion and the end point of a LineString touching the boundary of a polygon (float)" do
a =
"LINESTRING(180.0 150.0,250.0 150.0,230.0 250.0,370.0 250.0,410.0 150.0)"
|> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-007 - L/A-3-7: the middle portion of a LineString touching the boundary (at non-vertices) of a polygon" do
a = "LINESTRING (210 210, 220 150, 320 150, 370 210)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-007 - L/A-3-7: the middle portion of a LineString touching the boundary (at non-vertices) of a polygon (float)" do
a = "LINESTRING(210.0 210.0,220.0 150.0,320.0 150.0,370.0 210.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-008 - L/A-4-1: a line at non-vertex crossing non-vertex boundary of polygon" do
a = "LINESTRING (20 60, 150 60)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-008 - L/A-4-1: a line at non-vertex crossing non-vertex boundary of polygon (float)" do
a = "LINESTRING(20.0 60.0,150.0 60.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-009 - L/A-4-2: a line at non-vertex crossing non-vertex boundaries of polygon twice" do
a = "LINESTRING (60 90, 310 180)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-009 - L/A-4-2: a line at non-vertex crossing non-vertex boundaries of polygon twice (float)" do
a = "LINESTRING(60.0 90.0,310.0 180.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-010 - L/A-4-3: a line at non-vertex crossing vertex boundary of polygon" do
a = "LINESTRING (90 210, 210 90)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-010 - L/A-4-3: a line at non-vertex crossing vertex boundary of polygon (float)" do
a = "LINESTRING(90.0 210.0,210.0 90.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-011 - L/A-4-4: a line at non-vertex crossing vertex boundaries of polygon twice" do
a = "LINESTRING (290 10, 130 170)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-011 - L/A-4-4: a line at non-vertex crossing vertex boundaries of polygon twice (float)" do
a = "LINESTRING(290.0 10.0,130.0 170.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-012 - L/A-4-5: a line at vertex crossing non-vertex boundary of polygon" do
a = "LINESTRING (30 100, 100 100, 180 100)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-012 - L/A-4-5: a line at vertex crossing non-vertex boundary of polygon (float)" do
a = "LINESTRING(30.0 100.0,100.0 100.0,180.0 100.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-013 - L/A-4-6: a line at vertex crossing non-vertex boundaries of polygon twice" do
a = "LINESTRING (20 100, 100 100, 360 100, 410 100)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-013 - L/A-4-6: a line at vertex crossing non-vertex boundaries of polygon twice (float)" do
a = "LINESTRING(20.0 100.0,100.0 100.0,360.0 100.0,410.0 100.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-014 - L/A-4-7: a line at vertex crossing vertex boundary of polygon" do
a = "LINESTRING (90 210, 150 150, 210 90)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-014 - L/A-4-7: a line at vertex crossing vertex boundary of polygon (float)" do
a = "LINESTRING(90.0 210.0,150.0 150.0,210.0 90.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-015 - L/A-5-1: an entire line within a polygon" do
a = "LINESTRING (180 90, 280 120)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-015 - L/A-5-1: an entire line within a polygon (float)" do
a = "LINESTRING(180.0 90.0,280.0 120.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-016 - L/A-5-2: a line within a polygon but the line's both ends touching the boundary of the polygon" do
a = "LINESTRING (70 70, 80 20)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-016 - L/A-5-2: a line within a polygon but the line's both ends touching the boundary of the polygon (float)" do
a = "LINESTRING(70.0 70.0,80.0 20.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-017 - L/A-5-3: a line within a polygon but the line's start point touching the boundary of the polygon" do
a = "LINESTRING (130 20, 150 60)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-017 - L/A-5-3: a line within a polygon but the line's start point touching the boundary of the polygon (float)" do
a = "LINESTRING(130.0 20.0,150.0 60.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-018 - L/A-5-4: a line within a polygon but the line's start point and middle portion touching the boundary of the polygon" do
a = "LINESTRING (70 70, 80 20, 140 20, 150 60)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-018 - L/A-5-4: a line within a polygon but the line's start point and middle portion touching the boundary of the polygon (float)" do
a = "LINESTRING(70.0 70.0,80.0 20.0,140.0 20.0,150.0 60.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-019 - L/A-5-5: a line within a polygon but the line's middle portion touching the boundary of the polygon" do
a = "LINESTRING (170 50, 170 20, 240 20, 260 60)" |> Geo.WKT.decode!()
b = "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-019 - L/A-5-5: a line within a polygon but the line's middle portion touching the boundary of the polygon (float)" do
a = "LINESTRING(170.0 50.0,170.0 20.0,240.0 20.0,260.0 60.0)" |> Geo.WKT.decode!()
b = "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-020 - L/Ah-2-1: a line outside a polygon" do
a = "LINESTRING (50 100, 140 190, 280 190)" |> Geo.WKT.decode!()
b =
"POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150),(170 120, 330 120, 260 50, 100 50, 170 120))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-020 - L/Ah-2-1: a line outside a polygon (float)" do
a = "LINESTRING(50.0 100.0,140.0 190.0,280.0 190.0)" |> Geo.WKT.decode!()
b =
"POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0),(170.0 120.0,330.0 120.0,260.0 50.0,100.0 50.0,170.0 120.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-021 - L/Ah-2-2: a line inside a polygon's hole" do
a = "LINESTRING (140 60, 180 100, 290 100)" |> Geo.WKT.decode!()
b =
"POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150),(170 120, 330 120, 260 50, 100 50, 170 120))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-021 - L/Ah-2-2: a line inside a polygon's hole (float)" do
a = "LINESTRING(140.0 60.0,180.0 100.0,290.0 100.0)" |> Geo.WKT.decode!()
b =
"POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0),(170.0 120.0,330.0 120.0,260.0 50.0,100.0 50.0,170.0 120.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-022 - L/Ah-3-1: the start point of a line touching the inner boundary of a polygon" do
a = "LINESTRING (170 120, 210 80, 270 80)" |> Geo.WKT.decode!()
b =
"POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150),(170 120, 330 120, 260 50, 100 50, 170 120))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-022 - L/Ah-3-1: the start point of a line touching the inner boundary of a polygon (float)" do
a = "LINESTRING(170.0 120.0,210.0 80.0,270.0 80.0)" |> Geo.WKT.decode!()
b =
"POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0),(170.0 120.0,330.0 120.0,260.0 50.0,100.0 50.0,170.0 120.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-023 - L/Ah-3-2: both ends of a line touching the inner boundary of a polygon" do
a = "LINESTRING (170 120, 260 50)" |> Geo.WKT.decode!()
b =
"POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150),(170 120, 330 120, 260 50, 100 50, 170 120))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-023 - L/Ah-3-2: both ends of a line touching the inner boundary of a polygon (float)" do
a = "LINESTRING(170.0 120.0,260.0 50.0)" |> Geo.WKT.decode!()
b =
"POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0),(170.0 120.0,330.0 120.0,260.0 50.0,100.0 50.0,170.0 120.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-024 - L/Ah-3-1: both ends of a line touching the inner boundary of a polygon" do
a = "LINESTRING (190 90, 190 270)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 360 20, 20 20, 190 190),(190 190, 280 50, 100 50, 190 190))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-024 - L/Ah-3-1: both ends of a line touching the inner boundary of a polygon (float)" do
a = "LINESTRING(190.0 90.0,190.0 270.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,360.0 20.0,20.0 20.0,190.0 190.0),(190.0 190.0,280.0 50.0,100.0 50.0,190.0 190.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-025 - L/Ah-3-2: a line at a non-vertex crossing the boundary of a polygon where the closing point of the hole touches the shell at a non-vertex" do
a = "LINESTRING (60 160, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 360 20, 20 20, 190 190),(110 110, 250 100, 140 30, 110 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-025 - L/Ah-3-2: a line at a non-vertex crossing the boundary of a polygon where the closing point of the hole touches the shell at a non-vertex (float)" do
a = "LINESTRING(60.0 160.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,360.0 20.0,20.0 20.0,190.0 190.0),(110.0 110.0,250.0 100.0,140.0 30.0,110.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-026 - L/Ah-3-3: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a non-vertex" do
a = "LINESTRING (60 160, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 20 20, 360 20, 190 190),(250 100, 110 110, 140 30, 250 100))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-026 - L/Ah-3-3: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a non-vertex (float)" do
a = "LINESTRING(60.0 160.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,20.0 20.0,360.0 20.0,190.0 190.0),(250.0 100.0,110.0 110.0,140.0 30.0,250.0 100.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-027 - L/Ah-3-4: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a vertex" do
a = "LINESTRING (60 160, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 20 20, 360 20, 190 190),(250 100, 110 110, 140 30, 250 100))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-027 - L/Ah-3-4: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a vertex (float)" do
a = "LINESTRING(60.0 160.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,20.0 20.0,360.0 20.0,190.0 190.0),(250.0 100.0,110.0 110.0,140.0 30.0,250.0 100.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-028 - L/Ah-3-5: a line crossing polygon boundary where the closing point of the hole touches the shell at a vertex" do
a = "LINESTRING (190 90, 190 190, 190 270)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 360 20, 20 20, 190 190),(190 190, 280 50, 100 50, 190 190))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-028 - L/Ah-3-5: a line crossing polygon boundary where the closing point of the hole touches the shell at a vertex (float)" do
a = "LINESTRING(190.0 90.0,190.0 190.0,190.0 270.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,360.0 20.0,20.0 20.0,190.0 190.0),(190.0 190.0,280.0 50.0,100.0 50.0,190.0 190.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-029 - L/Ah-3-6: a line at a vertex crossing the boundary of a polygon where closing point of the hole touches the shell at a non-vertex" do
a = "LINESTRING (60 160, 110 110, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 360 20, 20 20, 190 190),(110 110, 250 100, 140 30, 110 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-029 - L/Ah-3-6: a line at a vertex crossing the boundary of a polygon where closing point of the hole touches the shell at a non-vertex (float)" do
a = "LINESTRING(60.0 160.0,110.0 110.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,360.0 20.0,20.0 20.0,190.0 190.0),(110.0 110.0,250.0 100.0,140.0 30.0,110.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-030 - L/Ah-3-7: a line at a vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a non-vertex" do
a = "LINESTRING (60 160, 110 110, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 20 20, 360 20, 190 190),(250 100, 110 110, 140 30, 250 100))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-030 - L/Ah-3-7: a line at a vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a non-vertex (float)" do
a = "LINESTRING(60.0 160.0,110.0 110.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,20.0 20.0,360.0 20.0,190.0 190.0),(250.0 100.0,110.0 110.0,140.0 30.0,250.0 100.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-031 - L/Ah-3-8: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a vertex" do
a = "LINESTRING (60 160, 110 110, 150 70)" |> Geo.WKT.decode!()
b =
"POLYGON ((190 190, 110 110, 20 20, 360 20, 190 190),(250 100, 110 110, 140 30, 250 100))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-031 - L/Ah-3-8: a line at a non-vertex crossing the boundary of a polygon where the hole at a vertex touches the shell at a vertex (float)" do
a = "LINESTRING(60.0 160.0,110.0 110.0,150.0 70.0)" |> Geo.WKT.decode!()
b =
"POLYGON((190.0 190.0,110.0 110.0,20.0 20.0,360.0 20.0,190.0 190.0),(250.0 100.0,110.0 110.0,140.0 30.0,250.0 100.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-032 - L/A2h-3-1: the start point a line touching the closing points of two connected holes in a polygon" do
a = "LINESTRING (130 110, 180 110, 190 60)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 240 200, 240 20, 20 20, 20 200),(130 110, 60 180, 60 40, 130 110),(130 110, 200 40, 200 180, 130 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-032 - L/A2h-3-1: the start point a line touching the closing points of two connected holes in a polygon (float)" do
a = "LINESTRING(130.0 110.0,180.0 110.0,190.0 60.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,240.0 200.0,240.0 20.0,20.0 20.0,20.0 200.0),(130.0 110.0,60.0 180.0,60.0 40.0,130.0 110.0),(130.0 110.0,200.0 40.0,200.0 180.0,130.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-033 - L/A2h-3-2: the interior (at a non-vertex) of a line touching the closing points of two connected holes in a polygon" do
a = "LINESTRING (80 110, 180 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 240 200, 240 20, 20 20, 20 200),(130 110, 60 180, 60 40, 130 110),(130 110, 200 40, 200 180, 130 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-033 - L/A2h-3-2: the interior (at a non-vertex) of a line touching the closing points of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,180.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,240.0 200.0,240.0 20.0,20.0 20.0,20.0 200.0),(130.0 110.0,60.0 180.0,60.0 40.0,130.0 110.0),(130.0 110.0,200.0 40.0,200.0 180.0,130.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-034 - L/A2h-3-3: the interior (at a non-vertex) of a line touching the closing point and at a vertex of two connected holes in a polygon" do
a = "LINESTRING (80 110, 180 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 20 20, 240 20, 240 200, 20 200),(60 180, 130 110, 60 40, 60 180),(130 110, 200 40, 200 180, 130 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-034 - L/A2h-3-3: the interior (at a non-vertex) of a line touching the closing point and at a vertex of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,180.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,20.0 20.0,240.0 20.0,240.0 200.0,20.0 200.0),(60.0 180.0,130.0 110.0,60.0 40.0,60.0 180.0),(130.0 110.0,200.0 40.0,200.0 180.0,130.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-035 - L/A2h-3-4: the interior (at a non-vertex) of a line touching the closing point and at a non-vertex of two connected holes in a polygon" do
a = "LINESTRING (80 110, 170 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 20 20, 240 20, 240 200, 20 200),(130 110, 60 40, 60 180, 130 110),(130 180, 130 40, 200 110, 130 180))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-035 - L/A2h-3-4: the interior (at a non-vertex) of a line touching the closing point and at a non-vertex of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,170.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,20.0 20.0,240.0 20.0,240.0 200.0,20.0 200.0),(130.0 110.0,60.0 40.0,60.0 180.0,130.0 110.0),(130.0 180.0,130.0 40.0,200.0 110.0,130.0 180.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-036 - L/A2h-3-5: the start point a line touching the closing point and a non-vertex of two connected holes in a polygon" do
a = "LINESTRING (80 110, 130 110, 170 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 20 20, 240 20, 240 200, 20 200),(130 110, 60 40, 60 180, 130 110),(130 180, 130 40, 200 110, 130 180))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-036 - L/A2h-3-5: the start point a line touching the closing point and a non-vertex of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,130.0 110.0,170.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,20.0 20.0,240.0 20.0,240.0 200.0,20.0 200.0),(130.0 110.0,60.0 40.0,60.0 180.0,130.0 110.0),(130.0 180.0,130.0 40.0,200.0 110.0,130.0 180.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-037 - L/A2h-3-6: the interior (at a vertex) of a line touching the closing points of two connected holes in a polygon" do
a = "LINESTRING (80 110, 130 110, 180 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 240 200, 240 20, 20 20, 20 200),(130 110, 60 180, 60 40, 130 110),(130 110, 200 40, 200 180, 130 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-037 - L/A2h-3-6: the interior (at a vertex) of a line touching the closing points of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,130.0 110.0,180.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,240.0 200.0,240.0 20.0,20.0 20.0,20.0 200.0),(130.0 110.0,60.0 180.0,60.0 40.0,130.0 110.0),(130.0 110.0,200.0 40.0,200.0 180.0,130.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-038 - L/A2h-3-7: the interior (at a vertex) of a line touching the closing point and at a vertex of two connected holes in a polygon" do
a = "LINESTRING (80 110, 130 110, 180 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 20 20, 240 20, 240 200, 20 200),(60 180, 130 110, 60 40, 60 180),(130 110, 200 40, 200 180, 130 110))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-038 - L/A2h-3-7: the interior (at a vertex) of a line touching the closing point and at a vertex of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,130.0 110.0,180.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,20.0 20.0,240.0 20.0,240.0 200.0,20.0 200.0),(60.0 180.0,130.0 110.0,60.0 40.0,60.0 180.0),(130.0 110.0,200.0 40.0,200.0 180.0,130.0 110.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-039 - L/A2h-3-8: the interior (at a vertex) of a line touching the closing point and at a non-vertex of two connected holes in a polygon" do
a = "LINESTRING (80 110, 130 110, 170 110)" |> Geo.WKT.decode!()
b =
"POLYGON ((20 200, 20 20, 240 20, 240 200, 20 200),(130 110, 60 40, 60 180, 130 110),(130 180, 130 40, 200 110, 130 180))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-039 - L/A2h-3-8: the interior (at a vertex) of a line touching the closing point and at a non-vertex of two connected holes in a polygon (float)" do
a = "LINESTRING(80.0 110.0,130.0 110.0,170.0 110.0)" |> Geo.WKT.decode!()
b =
"POLYGON((20.0 200.0,20.0 20.0,240.0 20.0,240.0 200.0,20.0 200.0),(130.0 110.0,60.0 40.0,60.0 180.0,130.0 110.0),(130.0 180.0,130.0 40.0,200.0 110.0,130.0 180.0))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-040 - L/mA-4-1: a line intersecting the interior and exterior of MultiPolygon" do
a = "LINESTRING (160 70, 320 230)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON (((140 110, 260 110, 170 20, 50 20, 140 110)),((300 270, 420 270, 340 190, 220 190, 300 270)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-040 - L/mA-4-1: a line intersecting the interior and exterior of MultiPolygon (float)" do
a = "LINESTRING(160.0 70.0,320.0 230.0)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON(((140.0 110.0,260.0 110.0,170.0 20.0,50.0 20.0,140.0 110.0)),((300.0 270.0,420.0 270.0,340.0 190.0,220.0 190.0,300.0 270.0)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-041 - L/mA-4-2: a line intersecting the interior and exterior of MultiPolygon" do
a = "LINESTRING (160 70, 200 110, 280 190, 320 230)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON (((140 110, 260 110, 170 20, 50 20, 140 110)),((300 270, 420 270, 340 190, 220 190, 300 270)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-041 - L/mA-4-2: a line intersecting the interior and exterior of MultiPolygon (float)" do
a = "LINESTRING(160.0 70.0,200.0 110.0,280.0 190.0,320.0 230.0)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON(((140.0 110.0,260.0 110.0,170.0 20.0,50.0 20.0,140.0 110.0)),((300.0 270.0,420.0 270.0,340.0 190.0,220.0 190.0,300.0 270.0)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-042 - L/mA-5-1: a line within two connected polygons" do
a = "LINESTRING (70 50, 70 150)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON (((0 0, 0 100, 140 100, 140 0, 0 0)),((20 170, 70 100, 130 170, 20 170)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
@tag :skip
test "05-042 - L/mA-5-1: a line within two connected polygons (float)" do
a = "LINESTRING(70.0 50.0,70.0 150.0)" |> Geo.WKT.decode!()
b =
"MULTIPOLYGON(((0.0 0.0,0.0 100.0,140.0 100.0,140.0 0.0,0.0 0.0)),((20.0 170.0,70.0 100.0,130.0 170.0,20.0 170.0)))"
|> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-043 - RL/A-3-1: a LinearRing touching a polygon's closing point" do
a = "LINESTRING (110 110, 20 200, 200 200, 110 110)" |> Geo.WKT.decode!()
b = "POLYGON ((20 20, 200 20, 110 110, 20 20))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-043 - RL/A-3-1: a LinearRing touching a polygon's closing point (float)" do
a = "LINESTRING(110.0 110.0,20.0 200.0,200.0 200.0,110.0 110.0)" |> Geo.WKT.decode!()
b = "POLYGON((20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-044 - RL/A-3-2: a LinearRing touching a polygon's boundary at a non-vertex" do
a = "LINESTRING (150 70, 160 110, 200 60, 150 70)" |> Geo.WKT.decode!()
b = "POLYGON ((20 20, 200 20, 110 110, 20 20))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-044 - RL/A-3-2: a LinearRing touching a polygon's boundary at a non-vertex (float)" do
  # Float-coordinate variant of 05-044; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(150.0 70.0,160.0 110.0,200.0 60.0,150.0 70.0)")
  geom_b = Geo.WKT.decode!("POLYGON((20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-045 - RL/A-3-3: a LinearRing touching a polygon's boundary at a non-vertex" do
  # Ring touches the boundary of a polygon-with-hole at a non-vertex point.
  geom_a = Geo.WKT.decode!("LINESTRING (80 60, 120 40, 120 70, 80 60)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 50 30, 170 30, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-045 - RL/A-3-3: a LinearRing touching a polygon's boundary at a non-vertex (float)" do
  # Float-coordinate variant of 05-045; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(80.0 60.0,120.0 40.0,120.0 70.0,80.0 60.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,50.0 30.0,170.0 30.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-046 - RL/A-3-4: a LinearRing on the boundary of a polygon" do
  # Ring coincides with the full polygon boundary: intersects but neither contains the other.
  geom_a = Geo.WKT.decode!("LINESTRING (20 20, 200 20, 110 110, 20 20)")
  geom_b = Geo.WKT.decode!("POLYGON ((20 20, 200 20, 110 110, 20 20))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-046 - RL/A-3-4: a LinearRing on the boundary of a polygon (float)" do
  # Float-coordinate variant of 05-046; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0)")
  geom_b = Geo.WKT.decode!("POLYGON((20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-047 - RL/A-3-5: a LinearRing on the inner boundary of a polygon" do
  # Ring coincides with the polygon's hole boundary (reversed orientation).
  geom_a = Geo.WKT.decode!("LINESTRING (110 90, 170 30, 50 30, 110 90)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 50 30, 170 30, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-047 - RL/A-3-5: a LinearRing on the inner boundary of a polygon (float)" do
  # Float-coordinate variant of 05-047; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 90.0,170.0 30.0,50.0 30.0,110.0 90.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,50.0 30.0,170.0 30.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-048 - RL/A-3-6: a LinearRing on the inner boundary of a polygon" do
  # Ring touches the hole's closing point while extending outside the polygon.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 170 50, 170 110, 110 110)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 50 30, 170 30, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-048 - RL/A-3-6: a LinearRing on the inner boundary of a polygon (float)" do
  # Float-coordinate variant of 05-048; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 110.0,170.0 50.0,170.0 110.0,110.0 110.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,50.0 30.0,170.0 30.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-049 - RL/A-3-7: a LinearRing on the inner boundary of a polygon" do
  # Ring touches the hole boundary from inside the hole region.
  geom_a = Geo.WKT.decode!("LINESTRING (110 90, 70 50, 130 50, 110 90)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 50 30, 170 30, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-049 - RL/A-3-7: a LinearRing on the inner boundary of a polygon (float)" do
  # Float-coordinate variant of 05-049; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 90.0,70.0 50.0,130.0 50.0,110.0 90.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,50.0 30.0,170.0 30.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-050 - RL/A-4-1: a LinearRing crossing a polygon" do
  # Ring crosses through the polygon interior and exterior: intersects, no containment.
  geom_a = Geo.WKT.decode!("LINESTRING (110 60, 20 150, 200 150, 110 60)")
  geom_b = Geo.WKT.decode!("POLYGON ((20 20, 200 20, 110 110, 20 20))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-050 - RL/A-4-1: a LinearRing crossing a polygon (float)" do
  # Float-coordinate variant of 05-050; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 60.0,20.0 150.0,200.0 150.0,110.0 60.0)")
  geom_b = Geo.WKT.decode!("POLYGON((20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-051 - RL/A-4-2: a LinearRing crossing a polygon with a hole" do
  # Ring crosses the polygon's outer boundary near the hole region.
  geom_a = Geo.WKT.decode!("LINESTRING (110 130, 110 70, 200 100, 110 130)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 50 30, 170 30, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-051 - RL/A-4-2: a LinearRing crossing a polygon with a hole (float)" do
  # Float-coordinate variant of 05-051; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 130.0,110.0 70.0,200.0 100.0,110.0 130.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,50.0 30.0,170.0 30.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-052 - RL/A-5-1: a LinearRing within a polygon" do
  # Ring lies entirely inside the polygon, so within?/2 is the one true containment check.
  geom_a = Geo.WKT.decode!("LINESTRING (110 90, 160 40, 60 40, 110 90)")
  geom_b = Geo.WKT.decode!("POLYGON ((20 20, 200 20, 110 110, 20 20))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-052 - RL/A-5-1: a LinearRing within a polygon (float)" do
  # Float-coordinate variant of 05-052; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 90.0,160.0 40.0,60.0 40.0,110.0 90.0)")
  geom_b = Geo.WKT.decode!("POLYGON((20.0 20.0,200.0 20.0,110.0 110.0,20.0 20.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-053 - RL/A-5-2: a LinearRing within a polygon with a hole" do
  # Ring stays inside the polygon's interior, skirting the hole: within is true.
  geom_a = Geo.WKT.decode!("LINESTRING (110 100, 40 30, 180 30, 110 100)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 60 40, 160 40, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-053 - RL/A-5-2: a LinearRing within a polygon with a hole (float)" do
  # Float-coordinate variant of 05-053; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 100.0,40.0 30.0,180.0 30.0,110.0 100.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,60.0 40.0,160.0 40.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-054 - RL/A-5-3: a LinearRing within a polygon with a hole" do
  # Ring touches the outer boundary's closing point but otherwise stays inside: within.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 180 30, 40 30, 110 110)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 60 40, 160 40, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-054 - RL/A-5-3: a LinearRing within a polygon with a hole (float)" do
  # Float-coordinate variant of 05-054; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 110.0,180.0 30.0,40.0 30.0,110.0 110.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,60.0 40.0,160.0 40.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-055 - RL/A-5-4: a LinearRing within a polygon with a hole" do
  # Ring touches the hole's closing point but remains in the polygon interior: within.
  geom_a = Geo.WKT.decode!("LINESTRING (110 90, 180 30, 40 30, 110 90)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 60 40, 160 40, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-055 - RL/A-5-4: a LinearRing within a polygon with a hole (float)" do
  # Float-coordinate variant of 05-055; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 90.0,180.0 30.0,40.0 30.0,110.0 90.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,60.0 40.0,160.0 40.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-056 - RL/A-5-5: a LinearRing within a polygon with a hole" do
  # Ring stays in the interior while grazing the hole region: within is true.
  geom_a = Geo.WKT.decode!("LINESTRING (110 90, 50 30, 180 30, 110 90)")

  geom_b =
    Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110),(110 90, 60 40, 160 40, 110 90))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-056 - RL/A-5-5: a LinearRing within a polygon with a hole (float)" do
  # Float-coordinate variant of 05-056; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 90.0,50.0 30.0,180.0 30.0,110.0 90.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0),(110.0 90.0,60.0 40.0,160.0 40.0,110.0 90.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-057 - nsL/A-3-1: a non-simple LineString touching a polygon" do
  # Self-intersecting line touches the polygon only at its apex vertex.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 200 200, 200 110, 110 200)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-057 - nsL/A-3-1: a non-simple LineString touching a polygon (float)" do
  # Float-coordinate variant of 05-057; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 110.0,200.0 200.0,200.0 110.0,110.0 200.0)")
  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-058 - nsL/A-3-2: a non-simple LineString touching a polygon" do
  # Line revisits the polygon apex twice while remaining outside the interior.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 200 200, 110 110, 20 200, 20 110, 200 110)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-058 - nsL/A-3-2: a non-simple LineString touching a polygon (float)" do
  # Float-coordinate variant of 05-058; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(110.0 110.0,200.0 200.0,110.0 110.0,20.0 200.0,20.0 110.0,200.0 110.0)")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-059 - nsL/A-3-3: a non-simple LineString touching a polygon" do
  # Line retraces itself along y=110, touching the polygon apex.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 20 110, 200 110, 50 110, 110 170)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-059 - nsL/A-3-3: a non-simple LineString touching a polygon (float)" do
  # Float-coordinate variant of 05-059; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(110.0 110.0,20.0 110.0,200.0 110.0,50.0 110.0,110.0 170.0)")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-060 - nsL/A-3-4: a non-simple LineString touching a polygon" do
  # Line loops back through the polygon apex before heading away.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 20 200, 110 200, 110 110, 200 200)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-060 - nsL/A-3-4: a non-simple LineString touching a polygon (float)" do
  # Float-coordinate variant of 05-060; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(110.0 110.0,20.0 200.0,110.0 200.0,110.0 110.0,200.0 200.0)")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-061 - nsL/A-3-5: a non-simple LineString touching a polygon" do
  # Self-crossing line dips toward the polygon interior while only touching its boundary.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 170 50, 20 200, 20 110, 200 110)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-061 - nsL/A-3-5: a non-simple LineString touching a polygon (float)" do
  # Float-coordinate variant of 05-061; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(110.0 110.0,170.0 50.0,20.0 200.0,20.0 110.0,200.0 110.0)")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-062 - nsL/A-4-1: a non-simple LineString crossing a polygon" do
  # Self-intersecting line passes through both interior and exterior of the polygon.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 180 40, 110 40, 110 180)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-062 - nsL/A-4-1: a non-simple LineString crossing a polygon (float)" do
  # Float-coordinate variant of 05-062; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 110.0,180.0 40.0,110.0 40.0,110.0 180.0)")
  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-063 - nsL/A-5-1: a non-simple LineString within a polygon" do
  # Self-crossing line lies entirely inside the polygon, so within? is true.
  geom_a = Geo.WKT.decode!("LINESTRING (110 60, 50 30, 170 30, 90 70)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-063 - nsL/A-5-1: a non-simple LineString within a polygon (float)" do
  # Float-coordinate variant of 05-063; same expected predicate results.
  geom_a = Geo.WKT.decode!("LINESTRING(110.0 60.0,50.0 30.0,170.0 30.0,90.0 70.0)")
  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-064 - nsL/A-5-2: a non-simple LineString within a polygon" do
  # Line revisits the apex vertex but never leaves the closed polygon: within.
  geom_a = Geo.WKT.decode!("LINESTRING (110 110, 180 40, 110 40, 110 110, 70 40)")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-064 - nsL/A-5-2: a non-simple LineString within a polygon (float)" do
  # Float-coordinate variant of 05-064; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(110.0 110.0,180.0 40.0,110.0 40.0,110.0 110.0,70.0 40.0)")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == true
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-065 - nsL/Ah: the self-crossing point of a non-simple LineString touching the closing point of the inner boundary of a polygon" do
  # Line's own crossing point coincides with the hole ring's closing vertex.
  geom_a = Geo.WKT.decode!("LINESTRING (230 70, 170 120, 190 60, 140 60, 170 120, 270 90)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON ((150 150, 410 150, 280 20, 20 20, 150 150),(170 120, 330 120, 260 50, 100 50, 170 120))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-065 - nsL/Ah: the self-crossing point of a non-simple LineString touching the closing point of the inner boundary of a polygon (float)" do
  # Float-coordinate variant of 05-065; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("LINESTRING(230.0 70.0,170.0 120.0,190.0 60.0,140.0 60.0,170.0 120.0,270.0 90.0)")

  geom_b =
    Geo.WKT.decode!(
      "POLYGON((150.0 150.0,410.0 150.0,280.0 20.0,20.0 20.0,150.0 150.0),(170.0 120.0,330.0 120.0,260.0 50.0,100.0 50.0,170.0 120.0))"
    )

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-066 - mL/A-3-1: MultiLineString touching a polygon's closing point" do
  # One component runs along y=110; the other passes through the polygon apex twice.
  geom_a =
    Geo.WKT.decode!("MULTILINESTRING ((20 110, 200 110),(200 200, 110 110, 20 210, 110 110))")

  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-066 - mL/A-3-1: MultiLineString touching a polygon's closing point (float)" do
  # Float-coordinate variant of 05-066; same expected predicate results.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(200.0 200.0,110.0 110.0,20.0 210.0,110.0 110.0))"
    )

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-067 - mL/A-3-2: MultiLineString touching a polygon's closing point" do
  # Second component ends exactly at the polygon's ring-closing vertex.
  geom_a =
    Geo.WKT.decode!("MULTILINESTRING ((20 110, 200 110),(60 180, 60 110, 160 110, 110 110))")

  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-067 - mL/A-3-2: MultiLineString touching a polygon's closing point (float)" do
  # Float-coordinate variant of 05-067; same expected predicate results.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(60.0 180.0,60.0 110.0,160.0 110.0,110.0 110.0))"
    )

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-068 - mL/A-3-3: MultiLineString touching a polygon's closing point" do
  # Second component loops outside the polygon, returning to the apex vertex.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING ((20 110, 200 110),(200 200, 110 110, 20 200, 110 200, 110 110))"
    )

  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-068 - mL/A-3-3: MultiLineString touching a polygon's closing point (float)" do
  # Float-coordinate variant of 05-068; same expected predicate results.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(200.0 200.0,110.0 110.0,20.0 200.0,110.0 200.0,110.0 110.0))"
    )

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-069 - mL/A-4-1: MultiLineString crossing a polygon" do
  # Second component zig-zags vertically through the polygon interior.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING ((20 110, 200 110),(110 50, 110 170, 110 70, 110 150, 200 150))"
    )

  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-069 - mL/A-4-1: MultiLineString crossing a polygon (float)" do
  # Float-coordinate variant of 05-069; same expected predicate results.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(110.0 50.0,110.0 170.0,110.0 70.0,110.0 150.0,200.0 150.0))"
    )

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-070 - mL/A-4-2: MultiLineString crossing a polygon" do
  # Second component repeatedly crosses the polygon along overlapping segments.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING ((20 110, 200 110),(50 110, 170 110, 110 170, 110 50, 110 170, 110 50))"
    )

  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-070 - mL/A-4-2: MultiLineString crossing a polygon (float)" do
  # Float-coordinate variant of 05-070; same expected predicate results.
  geom_a =
    Geo.WKT.decode!(
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(50.0 110.0,170.0 110.0,110.0 170.0,110.0 50.0,110.0 170.0,110.0 50.0))"
    )

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-071 - mL/A-4-3: MultiLineString crossing a polygon" do
  # Both components cut through the polygon: intersects, no containment either way.
  geom_a = Geo.WKT.decode!("MULTILINESTRING ((20 110, 200 110),(110 60, 110 160, 200 160))")
  geom_b = Geo.WKT.decode!("POLYGON ((110 110, 200 20, 20 20, 110 110))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
test "05-071 - mL/A-4-3: MultiLineString crossing a polygon (float)" do
  # Float-coordinate variant of 05-071; same expected predicate results.
  geom_a =
    Geo.WKT.decode!("MULTILINESTRING((20.0 110.0,200.0 110.0),(110.0 60.0,110.0 160.0,200.0 160.0))")

  geom_b = Geo.WKT.decode!("POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))")

  assert Topo.intersects?(geom_a, geom_b) == true
  assert Topo.intersects?(geom_b, geom_a) == true
  assert Topo.disjoint?(geom_a, geom_b) == false
  assert Topo.disjoint?(geom_b, geom_a) == false
  assert Topo.contains?(geom_a, geom_b) == false
  assert Topo.within?(geom_a, geom_b) == false
  assert Topo.equals?(geom_a, geom_b) == false
  assert Topo.equals?(geom_b, geom_a) == false
end
@tag :validation
# NOTE(review): both WKT strings here are byte-identical to case 05-071 (mL/A-4-3)
# above, so this "mL/A-4-4" test duplicates its predecessor instead of exercising a
# distinct geometry — verify the intended coordinates against the upstream
# validation suite and update them here.
test "05-072 - mL/A-4-4: MultiLineString crossing a polygon" do
  a = "MULTILINESTRING ((20 110, 200 110),(110 60, 110 160, 200 160))" |> Geo.WKT.decode!()
  b = "POLYGON ((110 110, 200 20, 20 20, 110 110))" |> Geo.WKT.decode!()
  assert Topo.intersects?(a, b) == true
  assert Topo.intersects?(b, a) == true
  assert Topo.disjoint?(a, b) == false
  assert Topo.disjoint?(b, a) == false
  assert Topo.contains?(a, b) == false
  assert Topo.within?(a, b) == false
  assert Topo.equals?(a, b) == false
  assert Topo.equals?(b, a) == false
end
@tag :validation
test "05-072 - mL/A-4-4: MultiLineString crossing a polygon (float)" do
a =
"MULTILINESTRING((20.0 110.0,200.0 110.0),(110.0 60.0,110.0 160.0,200.0 160.0))"
|> Geo.WKT.decode!()
b = "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
  # 05-073/05-074 (mL/A-5-*): every line of the MultiLineString lies inside the
  # triangle (at most touching its boundary), so `within?/2` is true while
  # `contains?/2` stays false — a line can never contain a polygon.
  @tag :validation
  test "05-073 - mL/A-5-1: MultiLineString within a polygon" do
    a = "MULTILINESTRING ((110 100, 40 30, 180 30),(170 30, 110 90, 50 30))" |> Geo.WKT.decode!()
    b = "POLYGON ((110 110, 200 20, 20 20, 110 110))" |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == true
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # Same fixture expressed with float coordinates in the WKT.
  @tag :validation
  test "05-073 - mL/A-5-1: MultiLineString within a polygon (float)" do
    a =
      "MULTILINESTRING((110.0 100.0,40.0 30.0,180.0 30.0),(170.0 30.0,110.0 90.0,50.0 30.0))"
      |> Geo.WKT.decode!()
    b = "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))" |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == true
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  @tag :validation
  test "05-074 - mL/A-5-2: MultiLineString within a polygon" do
    a =
      "MULTILINESTRING ((110 110, 60 40, 70 20, 150 20, 170 40),(180 30, 40 30, 110 80))"
      |> Geo.WKT.decode!()
    b = "POLYGON ((110 110, 200 20, 20 20, 110 110))" |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == true
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # Same fixture expressed with float coordinates in the WKT.
  @tag :validation
  test "05-074 - mL/A-5-2: MultiLineString within a polygon (float)" do
    a =
      "MULTILINESTRING((110.0 110.0,60.0 40.0,70.0 20.0,150.0 20.0,170.0 40.0),(180.0 30.0,40.0 30.0,110.0 80.0))"
      |> Geo.WKT.decode!()
    b = "POLYGON((110.0 110.0,200.0 20.0,20.0 20.0,110.0 110.0))" |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == true
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # 05-075/05-076 (mL/mA-3-*): despite the test names, the expected result is
  # `within? == false` — parts of the MultiLineString fall outside the two
  # triangles of the MultiPolygon, so only plain intersection holds.
  @tag :validation
  test "05-075 - mL/mA-3-1: MultiLineString within a MultiPolygon" do
    a =
      "MULTILINESTRING ((20 110, 200 110, 200 160),(110 110, 200 110, 200 70, 20 150))"
      |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON (((110 110, 20 20, 200 20, 110 110)),((110 110, 20 200, 200 200, 110 110)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # Same fixture expressed with float coordinates in the WKT.
  @tag :validation
  test "05-075 - mL/mA-3-1: MultiLineString within a MultiPolygon (float)" do
    a =
      "MULTILINESTRING((20.0 110.0,200.0 110.0,200.0 160.0),(110.0 110.0,200.0 110.0,200.0 70.0,20.0 150.0))"
      |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON(((110.0 110.0,20.0 20.0,200.0 20.0,110.0 110.0)),((110.0 110.0,20.0 200.0,200.0 200.0,110.0 110.0)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  @tag :validation
  test "05-076 - mL/mA-3-2: MultiLineString within a MultiPolygon" do
    a =
      "MULTILINESTRING ((20 160, 70 110, 150 110, 200 160),(110 110, 20 110, 50 80, 70 110, 200 110))"
      |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON (((110 110, 20 20, 200 20, 110 110)),((110 110, 20 200, 200 200, 110 110)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # Same fixture expressed with float coordinates in the WKT.
  @tag :validation
  test "05-076 - mL/mA-3-2: MultiLineString within a MultiPolygon (float)" do
    a =
      "MULTILINESTRING((20.0 160.0,70.0 110.0,150.0 110.0,200.0 160.0),(110.0 110.0,20.0 110.0,50.0 80.0,70.0 110.0,200.0 110.0))"
      |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON(((110.0 110.0,20.0 20.0,200.0 20.0,110.0 110.0)),((110.0 110.0,20.0 200.0,200.0 200.0,110.0 110.0)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # 05-077 (mL/mA-3-3): lines overlap the shared edge of the two triangles but
  # also extend past them — intersection only, no containment.
  @tag :validation
  test "05-077 - mL/mA-3-3: MultiLineString within a MultiPolygon" do
    a =
      "MULTILINESTRING ((20 110, 200 110),(110 110, 20 170, 20 130, 200 90))" |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON (((110 110, 20 20, 200 20, 110 110)),((110 110, 20 200, 200 200, 110 110)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
  # Same fixture expressed with float coordinates in the WKT.
  @tag :validation
  test "05-077 - mL/mA-3-3: MultiLineString within a MultiPolygon (float)" do
    a =
      "MULTILINESTRING((20.0 110.0,200.0 110.0),(110.0 110.0,20.0 170.0,20.0 130.0,200.0 90.0))"
      |> Geo.WKT.decode!()
    b =
      "MULTIPOLYGON(((110.0 110.0,20.0 20.0,200.0 20.0,110.0 110.0)),((110.0 110.0,20.0 200.0,200.0 200.0,110.0 110.0)))"
      |> Geo.WKT.decode!()
    assert Topo.intersects?(a, b) == true
    assert Topo.intersects?(b, a) == true
    assert Topo.disjoint?(a, b) == false
    assert Topo.disjoint?(b, a) == false
    assert Topo.contains?(a, b) == false
    assert Topo.within?(a, b) == false
    assert Topo.equals?(a, b) == false
    assert Topo.equals?(b, a) == false
  end
end
| 38.507468 | 169 | 0.606159 |
ff632b127cc401b2a7956dc6fb0dc95b662d1464 | 991 | ex | Elixir | lib/hunter/config.ex | milmazz/hunter | f84fd72ffae067da5b0ef24a86e780d3ef70ee01 | [
"Apache-2.0"
] | 38 | 2017-04-09T16:43:58.000Z | 2021-10-30T00:47:41.000Z | lib/hunter/config.ex | milmazz/hunter | f84fd72ffae067da5b0ef24a86e780d3ef70ee01 | [
"Apache-2.0"
] | 51 | 2017-04-14T13:02:42.000Z | 2022-02-28T11:16:44.000Z | lib/hunter/config.ex | milmazz/hunter | f84fd72ffae067da5b0ef24a86e780d3ef70ee01 | [
"Apache-2.0"
] | 8 | 2017-04-14T12:45:18.000Z | 2020-09-04T23:08:30.000Z | defmodule Hunter.Config do
@moduledoc """
Hunter configuration.
"""
@doc """
Returns adapter module to do run API calls.
## Examples
iex> Hunter.Config.hunter_api()
Hunter.ApiMock
"""
def hunter_api do
Application.get_env(:hunter, :hunter_api, Hunter.Api.HTTPClient)
end
@doc """
Returns the API base URL
## Examples
iex> Hunter.Config.api_base_url()
"https://mastodon.social"
"""
def api_base_url do
Application.get_env(:hunter, :api_base_url, "https://mastodon.social")
end
@doc """
Returns the Hunter home directory
## Examples
iex> Path.basename(Hunter.Config.home())
".hunter"
"""
def home do
home = System.get_env("HUNTER_HOME") || Application.get_env(:hunter, :home, "~/.hunter")
Path.expand(home)
end
@doc """
Returns HTTP options
iex> Hunter.Config.http_options()
[]
"""
def http_options do
Application.get_env(:hunter, :http_options, [])
end
end
| 17.385965 | 92 | 0.635721 |
ff6330aa356b338524e32ee9484694b2fd81422f | 1,468 | ex | Elixir | deps/gettext/lib/gettext/po/translation.ex | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | deps/gettext/lib/gettext/po/translation.ex | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | deps/gettext/lib/gettext/po/translation.ex | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | defmodule Gettext.PO.Translation do
@moduledoc """
A struct that holds information on a translation.
This struct describes a translation that has no plural form, such as the one
in the following snippet of `.po` file:
msgid "Hello world!"
msgstr "Ciao mondo!"
Translations with a plural form are not represented as
`Gettext.PO.Translation` structs, but as `Gettext.PO.PluralTranslation`
structs.
This struct contains the following fields:
* `msgid` - the id of the translation.
* `msgstr` - the translated string.
* `comments` - a list of comments as they are found in the PO file (for example,
`["# foo"]`).
* `extracted_comments` - a list of extracted comments (for example,
`["#. foo", "#. bar"]`).
* `references` - a list of references (files this translation comes from) in
the form `{file, line}`.
* `flags` - a set of flags for this translation.
* `po_source_line` - the line this translation is on in the PO file where it
comes from.
"""
@type t :: %__MODULE__{
msgid: [binary],
msgstr: [binary],
comments: [binary],
extracted_comments: [binary],
references: [{binary, pos_integer}],
flags: MapSet.t,
po_source_line: pos_integer,
}
defstruct msgid: nil,
msgstr: nil,
comments: [],
extracted_comments: [],
references: [],
flags: MapSet.new,
po_source_line: nil
end
| 29.959184 | 84 | 0.632834 |
ff633da4cbc14ad3d97767e0a6a992e8f2849c7f | 3,651 | exs | Elixir | apps/examples/test/examples/ping_pong/advisor_test.exs | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/examples/test/examples/ping_pong/advisor_test.exs | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/examples/test/examples/ping_pong/advisor_test.exs | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Examples.PingPong.AdvisorTest do
use Tai.TestSupport.E2ECase, async: false
alias Tai.NewOrders.{OrderRepo, Order, Transitions}
@scenario :ping_pong
@venue :test_exchange_a
@product :btc_usd
def before_start_app, do: seed_mock_responses(@scenario)
def after_start_app, do: seed_venues(@scenario)
def after_boot_app do
start_venue(@venue)
configure_advisor_group(@scenario)
start_advisors(where: [group_id: @scenario])
end
test "create a passive buy entry order and flip it to a passive sell order upon fill" do
# create an entry maker limit order on the first quote
push_stream_market_data({@scenario, :snapshot, @venue, @product})
assert_receive {:order_updated, entry_order_1_client_id, %Transitions.AcceptCreate{}}
entry_order_1 = OrderRepo.get!(Order, entry_order_1_client_id)
assert entry_order_1.side == :buy
assert entry_order_1.status == :create_accepted
assert entry_order_1.price == Decimal.new("5500.5")
assert entry_order_1.qty == Decimal.new(10)
assert entry_order_1.leaves_qty == Decimal.new(10)
push_stream_order_update(
{@scenario, :entry_order_1_open, @venue, @product},
entry_order_1_client_id
)
assert_receive {:order_updated, ^entry_order_1_client_id, %Transitions.Open{}}
open_entry_order_1 = OrderRepo.get!(Order, entry_order_1_client_id)
assert open_entry_order_1.status == :open
assert open_entry_order_1.qty == Decimal.new(10)
assert open_entry_order_1.leaves_qty == Decimal.new(10)
# cancel and replace the entry order when the inside quote changes
push_stream_market_data({@scenario, :change_1, @venue, @product})
assert_receive {:order_updated, ^entry_order_1_client_id, %Transitions.AcceptCancel{}}
push_stream_order_update(
{@scenario, :entry_order_1_cancel, @venue, @product},
entry_order_1_client_id
)
assert_receive {:order_updated, ^entry_order_1_client_id, %Transitions.Cancel{}}
assert_receive {:order_updated, order_2_client_id, %Transitions.AcceptCreate{}}
order_2 = OrderRepo.get!(Order, order_2_client_id)
assert order_2.side == :buy
assert order_2.status == :create_accepted
assert order_2.price == Decimal.new("5504")
assert order_2.qty == Decimal.new(10)
push_stream_order_update(
{@scenario, :entry_order_2_open, @venue, @product},
order_2_client_id
)
assert_receive {:order_updated, ^order_2_client_id, %Transitions.Open{}}
# create an exit maker limit order when the entry is filled
push_stream_order_update(
{@scenario, :order_update_filled, @venue, @product},
order_2_client_id
)
assert_receive {:order_updated, ^order_2_client_id, %Transitions.Fill{}}
filled_entry_order = OrderRepo.get!(Order, order_2_client_id)
assert filled_entry_order.status == :filled
assert filled_entry_order.cumulative_qty == Decimal.new(10)
assert filled_entry_order.qty == Decimal.new(10)
assert filled_entry_order.leaves_qty == Decimal.new(0)
assert_receive {:order_updated, exit_order_1_client_id, %Transitions.AcceptCreate{}}
exit_order_1 = OrderRepo.get!(Order, exit_order_1_client_id)
assert exit_order_1.side == :sell
assert exit_order_1.status == :create_accepted
assert exit_order_1.price == Decimal.new("5504.5")
assert exit_order_1.qty == Decimal.new(10)
assert exit_order_1.leaves_qty == Decimal.new(10)
# clean up unfilled entry/exit orders when the advisor shuts down
stop_advisors(where: [group_id: @scenario])
assert_receive {:order_updated, ^exit_order_1_client_id, %Transitions.AcceptCancel{}}
end
end
| 41.022472 | 90 | 0.743906 |
ff634f44bbbd59fab374d17c5c6171573cf8caff | 520 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/model/user.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/user.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/user.ex | bruceadams/swagger-codegen-1 | 2e5289c4d74eafd48e3a324ccdd9e39323b5fb06 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule SwaggerPetstore.Model.User do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:"id",
:"username",
:"firstName",
:"lastName",
:"email",
:"password",
:"phone",
:"userStatus"
]
end
defimpl Poison.Decoder, for: SwaggerPetstore.Model.User do
def decode(value, _options) do
value
end
end
| 17.931034 | 75 | 0.653846 |
ff635f85706c1675b3ba824ff913a878d0db53a7 | 1,929 | ex | Elixir | web/models/payment.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | web/models/payment.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | web/models/payment.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z | defmodule Nectar.Payment do
use Nectar.Web, :model
alias __MODULE__
schema "payments" do
belongs_to :order, Nectar.Order
belongs_to :payment_method, Nectar.PaymentMethod
field :amount, :decimal
field :payment_state, :string, default: "authorized"
field :transaction_id
timestamps()
extensions()
end
#@payment_states ~w(authorized captured refunded)
@required_fields ~w(payment_method_id amount payment_state)a
@optional_fields ~w(transaction_id)a
def authorized?(%Payment{payment_state: "authorized"}), do: true
def authorized?(%Payment{}), do: false
def captured?(%Payment{payment_state: "captured"}), do: true
def captured?(%Payment{}), do: false
def refunded?(%Payment{payment_state: "refunded"}), do: true
def refunded?(%Payment{}), do: false
def changeset(model, params \\ %{}) do
model
|> cast(params, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
# TODO: can we add errors while payment authorisation here ??
def applicable_payment_changeset(model, params) do
model
|> cast(params, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
@required_fields ~w(payment_state)a
@optional_fields ~w()a
def capture_changeset(model) do
model
|> cast(%{"payment_state" => "captured"}, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
@required_fields ~w(payment_state)a
@optional_fields ~w()a
def refund_changeset(model) do
model
|> cast(%{"payment_state" => "refunded"}, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
@required_fields ~w(transaction_id)a
@optional_fields ~w()a
def transaction_id_changeset(model, params \\ %{}) do
model
|> cast(params, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
end
end
| 27.557143 | 83 | 0.708657 |
ff63c39f081e7f2b681b7ebaeb326b6bcfaeeb6e | 580 | ex | Elixir | lib/consumer.ex | TenTakano/Senkosan | 4ecddece5eb69ea5c3d10bd70e44c8c224b03a29 | [
"MIT"
] | null | null | null | lib/consumer.ex | TenTakano/Senkosan | 4ecddece5eb69ea5c3d10bd70e44c8c224b03a29 | [
"MIT"
] | 5 | 2020-08-10T21:45:19.000Z | 2020-08-10T22:05:14.000Z | lib/consumer.ex | TenTakano/Senkosan | 4ecddece5eb69ea5c3d10bd70e44c8c224b03a29 | [
"MIT"
] | null | null | null | defmodule Senkosan.Consumer do
@moduledoc false
use Nostrum.Consumer
alias Nostrum.Api
alias Senkosan.Util
alias Senkosan.VoiceState
@default_text_channel Application.get_env(:senkosan, :default_text_channel)
def start_link do
Consumer.start_link(__MODULE__)
end
def handle_event({:VOICE_STATE_UPDATE, msg, _}) do
if VoiceState.process_transition(msg) == :join do
Util.apply_bot_usage(msg.user_id, fn ->
Api.create_message(@default_text_channel, "おかえりなのじゃ!")
end)
end
end
def handle_event(_event) do
:noop
end
end
| 20.714286 | 77 | 0.727586 |
ff63fa2bc1ede42e484a4aa645685db42e39653f | 1,654 | ex | Elixir | hello/lib/hello_web/endpoint.ex | nash-elixir/intro-to-phoenix | 46e78713a0e8dcd895ad4f22d75b40c55ae41435 | [
"MIT"
] | 1 | 2018-01-12T20:21:56.000Z | 2018-01-12T20:21:56.000Z | hello/lib/hello_web/endpoint.ex | nash-elixir/intro-to-phoenix | 46e78713a0e8dcd895ad4f22d75b40c55ae41435 | [
"MIT"
] | null | null | null | hello/lib/hello_web/endpoint.ex | nash-elixir/intro-to-phoenix | 46e78713a0e8dcd895ad4f22d75b40c55ae41435 | [
"MIT"
] | null | null | null | defmodule HelloWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :hello
socket "/socket", HelloWeb.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :hello, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_hello_key",
signing_salt: "85JzVD58"
plug HelloWeb.Router
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
end
| 28.517241 | 95 | 0.705562 |
ff63ffcd8d534dc25544b0488db850ed76bd0cfd | 186 | exs | Elixir | test/support/fixtures.exs | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | test/support/fixtures.exs | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | test/support/fixtures.exs | eeng/blaine | d91d94034925d64632789925ef90a198c0a1d8ea | [
"MIT"
] | null | null | null | defmodule Watchlater.Support.Fixtures do
def fixture(file) do
Path.dirname(__ENV__.file)
|> Path.join(["fixtures/", file])
|> File.read!()
|> Jason.decode!()
end
end
| 20.666667 | 40 | 0.639785 |
ff6401f10a4603d4f77032e7355c09a4dcdcb81b | 1,860 | ex | Elixir | lib/oban/plugins/repeater.ex | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | null | null | null | lib/oban/plugins/repeater.ex | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | 26 | 2021-07-24T21:32:21.000Z | 2022-03-23T11:55:24.000Z | lib/oban/plugins/repeater.ex | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Plugins.Repeater do
@moduledoc """
Repeatedly send inserted messages to all registered producers to simulate polling.
This plugin is only necessary if you're running Oban in an environment where Postgres
notifications don't work, notably one of:
* Using a database connection pooler in transaction mode, i.e. pg_bouncer.
* Integration testing within the Ecto sandbox, i.e. developing Oban plugins
## Options
* `:interval` — the number of milliseconds between notifications. The default is `1_000ms`.
"""
use GenServer
alias Oban.Config
@type option :: {:conf, Config.t()} | {:name, GenServer.name()} | {:interval, pos_integer()}
defmodule State do
@moduledoc false
defstruct [:conf, :name, :timer, interval: :timer.seconds(1)]
end
@doc false
@spec start_link([option()]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: opts[:name])
end
@impl GenServer
def init(opts) do
Process.flag(:trap_exit, true)
state = struct!(State, opts)
{:ok, schedule_notify(state)}
end
@impl GenServer
def terminate(_reason, %State{timer: timer}) do
if is_reference(timer), do: Process.cancel_timer(timer)
:ok
end
@impl GenServer
def handle_info(:notify, %State{} = state) do
match = [{{{state.conf.name, {:producer, :"$1"}}, :"$2", :_}, [], [{{:"$1", :"$2"}}]}]
meta = %{conf: state.conf, plugin: __MODULE__}
:telemetry.span([:oban, :plugin], meta, fn ->
for {queue, pid} <- Registry.select(Oban.Registry, match) do
send(pid, {:notification, :insert, %{"queue" => queue}})
end
{:ok, meta}
end)
{:noreply, schedule_notify(state)}
end
defp schedule_notify(state) do
timer = Process.send_after(self(), :notify, state.interval)
%{state | timer: timer}
end
end
| 25.833333 | 94 | 0.660215 |
ff6423ed26e6c04af0c78065bd2f906e7cfa255a | 102 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/MatchedCallOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/MatchedCallOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/MatchedCallOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | one || two positional, key: value
one ||| two positional, key: value
one or two positional, key: value | 34 | 34 | 0.72549 |
ff6425d3e6495dff93c5260f12ec7a1a27ead18c | 5,499 | ex | Elixir | coherence_demo-0.6/lib/coherence_demo_web/coherence_messages.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | coherence_demo-0.6/lib/coherence_demo_web/coherence_messages.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | coherence_demo-0.6/lib/coherence_demo_web/coherence_messages.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | defmodule CoherenceDemoWeb.Coherence.Messages do
@moduledoc """
Application facing messages generated by the Coherence application.
This module was created by the coh.install mix task. It contains all the
messages used in the coherence application except those in other generated
files like the view and templates.
To assist in upgrading Coherence, the `Coherence.Messages` behaviour will
alway contain every message for the current version. This will help in upgrades
to ensure the user had added new the new messages from the current version.
"""
@behaviour Coherence.Messages
import CoherenceDemoWeb.Gettext
# Change this to override the "coherence" gettext domain. If you would like
# the coherence message to be part of your projects domain change it to "default"
@domain "coherence"
##################
# Messages
def account_already_confirmed, do: dgettext(@domain, "Account already confirmed.")
def account_is_not_locked, do: dgettext(@domain, "Account is not locked.")
def account_updated_successfully, do: dgettext(@domain, "Account updated successfully.")
def account_created_successfully, do: dgettext(@domain, "Account created successfully.")
def already_confirmed, do: dgettext(@domain, "already confirmed")
def already_locked, do: dgettext(@domain, "already locked")
def already_logged_in, do: dgettext(@domain, "Already logged in.")
def cant_be_blank, do: dgettext(@domain, "can't be blank")
def cant_find_that_token, do: dgettext(@domain, "Can't find that token")
def confirmation_email_sent, do: dgettext(@domain, "Confirmation email sent.")
def confirmation_token_expired, do: dgettext(@domain, "Confirmation token expired.")
def could_not_find_that_email_address,
do: dgettext(@domain, "Could not find that email address")
def forgot_your_password, do: dgettext(@domain, "Forgot your password?")
def http_authentication_required, do: dgettext(@domain, "HTTP Authentication Required")
def incorrect_login_or_password(opts),
do: dgettext(@domain, "Incorrect %{login_field} or password.", opts)
def invalid_current_password, do: dgettext(@domain, "invalid current password")
def invalid_invitation,
do: dgettext(@domain, "Invalid Invitation. Please contact the site administrator.")
def invalid_request, do: dgettext(@domain, "Invalid Request.")
def invalid_confirmation_token, do: dgettext(@domain, "Invalid confirmation token.")
def invalid_email_or_password, do: dgettext(@domain, "Invalid email or password.")
def invalid_invitation_token, do: dgettext(@domain, "Invalid invitation token.")
def invalid_reset_token, do: dgettext(@domain, "Invalid reset token.")
def invalid_unlock_token, do: dgettext(@domain, "Invalid unlock token.")
def invitation_already_sent, do: dgettext(@domain, "Invitation already sent.")
def invitation_sent, do: dgettext(@domain, "Invitation sent.")
def invite_someone, do: dgettext(@domain, "Invite Someone")
def maximum_login_attempts_exceeded,
do: dgettext(@domain, "Maximum Login attempts exceeded. Your account has been locked.")
def need_an_account, do: dgettext(@domain, "Need An Account?")
def not_locked, do: dgettext(@domain, "not locked")
def password_reset_token_expired, do: dgettext(@domain, "Password reset token expired.")
def password_updated_successfully, do: dgettext(@domain, "Password updated successfully.")
def problem_confirming_user_account,
do:
dgettext(
@domain,
"Problem confirming user account. Please contact the system administrator."
)
def registration_created_successfully,
do: dgettext(@domain, "Registration created successfully.")
def required, do: dgettext(@domain, "required")
def resend_confirmation_email, do: dgettext(@domain, "Resend confirmation email")
def reset_email_sent,
do: dgettext(@domain, "Reset email sent. Check your email for a reset link.")
def restricted_area, do: dgettext(@domain, "Restricted Area")
def send_an_unlock_email, do: dgettext(@domain, "Send an unlock email")
def sign_in, do: dgettext(@domain, "Sign In")
def sign_out, do: dgettext(@domain, "Sign Out")
def signed_in_successfully, do: dgettext(@domain, "Signed in successfully.")
def too_many_failed_login_attempts,
do: dgettext(@domain, "Too many failed login attempts. Account has been locked.")
def unauthorized_ip_address, do: dgettext(@domain, "Unauthorized IP Address")
def unlock_instructions_sent, do: dgettext(@domain, "Unlock Instructions sent.")
def user_account_confirmed_successfully,
do: dgettext(@domain, "User account confirmed successfully.")
def user_already_has_an_account, do: dgettext(@domain, "User already has an account!")
def you_must_confirm_your_account,
do: dgettext(@domain, "You must confirm your account before you can login.")
def your_account_has_been_unlocked, do: dgettext(@domain, "Your account has been unlocked")
def your_account_is_not_locked, do: dgettext(@domain, "Your account is not locked.")
def verify_user_token(opts),
do: dgettext(@domain, "Invalid %{user_token} error: %{error}", opts)
def you_are_using_an_invalid_security_token,
do:
dgettext(
@domain,
"You are using an invalid security token for this site! This security\nviolation has been logged.\n"
)
def mailer_required, do: dgettext(@domain, "Mailer configuration required!")
def account_is_inactive(), do: dgettext(@domain, "Account is inactive!")
end
| 45.825 | 108 | 0.753228 |
ff642e499a1adeb1a36f2772dda22279abfa30c8 | 191 | ex | Elixir | harbor/lib/ports/rumble/private_peer_data.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/lib/ports/rumble/private_peer_data.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/lib/ports/rumble/private_peer_data.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | defmodule Ports.Rumble.PrivatePeerData do
alias Ports.Rumble.TileData
@derive Jason.Encoder
defstruct hand: []
@type t :: %__MODULE__{
hand: [TileData.t()]
}
end
| 15.916667 | 41 | 0.643979 |
ff6430a3d13270364b22b4c4e1e067aba439fbd4 | 2,095 | exs | Elixir | exercises/wordy/wordy_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/wordy/wordy_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | null | null | null | exercises/wordy/wordy_test.exs | jerith/elixir | 9a3f2a2fbee26a7b6a6b3ad74a9e6d1ff2495ed4 | [
"Apache-2.0"
] | 1 | 2018-07-19T23:43:56.000Z | 2018-07-19T23:43:56.000Z | if !System.get_env("EXERCISM_TEST_EXAMPLES") do
Code.load_file("wordy.exs", __DIR__)
end
ExUnit.start()
ExUnit.configure(exclude: :pending, trace: true)
defmodule WordyTest do
use ExUnit.Case
test "addition" do
assert Wordy.answer("What is 1 plus 1?") == 2
end
@tag :pending
test "more addition" do
assert Wordy.answer("What is 53 plus 2?") == 55
end
@tag :pending
test "addition with negative numbers" do
assert Wordy.answer("What is -1 plus -10?") == -11
end
@tag :pending
test "large addition" do
assert Wordy.answer("What is 123 plus 45678?") == 45801
end
@tag :pending
test "subtraction" do
assert Wordy.answer("What is 4 minus -12?") == 16
end
@tag :pending
test "multiplication" do
assert Wordy.answer("What is -3 multiplied by 25?") == -75
end
@tag :pending
test "division" do
assert Wordy.answer("What is 33 divided by -3?") == -11
end
@tag :pending
test "multiple additions" do
assert Wordy.answer("What is 1 plus 1 plus 1?") == 3
end
@tag :pending
test "addition and subtraction" do
assert Wordy.answer("What is 1 plus 5 minus -2?") == 8
end
@tag :pending
test "multiple subtraction" do
assert Wordy.answer("What is 20 minus 4 minus 13?") == 3
end
@tag :pending
test "subtraction then addition" do
assert Wordy.answer("What is 17 minus 6 plus 3?") == 14
end
@tag :pending
test "multiple multiplication" do
assert Wordy.answer("What is 2 multiplied by -2 multiplied by 3?") == -12
end
@tag :pending
test "addition and multiplication" do
assert Wordy.answer("What is -3 plus 7 multiplied by -2?") == -8
end
@tag :pending
test "multiple division" do
assert Wordy.answer("What is -12 divided by 2 divided by -3?") == 2
end
@tag :pending
test "unknown operation" do
assert_raise ArgumentError, fn ->
Wordy.answer("What is 52 cubed?")
end
end
@tag :pending
test "Non math question" do
assert_raise ArgumentError, fn ->
Wordy.answer("Who is the President of the United States?")
end
end
end
| 22.287234 | 77 | 0.658234 |
ff643ff92c18d5b9d765cb49d7160161a2b50c33 | 7,970 | ex | Elixir | lib/bn/fqp.ex | ayrat555/bn | 68e1504e4e1524ffbc4a6277829e8ada7108cb9a | [
"MIT"
] | 1 | 2018-10-27T20:33:36.000Z | 2018-10-27T20:33:36.000Z | lib/bn/fqp.ex | ayrat555/bn | 68e1504e4e1524ffbc4a6277829e8ada7108cb9a | [
"MIT"
] | 2 | 2022-02-28T13:20:14.000Z | 2022-03-28T13:23:29.000Z | lib/bn/fqp.ex | ayrat555/bn | 68e1504e4e1524ffbc4a6277829e8ada7108cb9a | [
"MIT"
] | 1 | 2019-05-02T02:29:37.000Z | 2019-05-02T02:29:37.000Z | defmodule BN.FQP do
  # coef: little-endian list of FQ coefficients of the polynomial;
  # modulus_coef: integer coefficients of the field's modulus polynomial;
  # dim: degree of the extension (length of both lists).
  defstruct [:coef, :modulus_coef, :dim]
  alias BN.FQ
  @type t :: %__MODULE__{
          coef: [FQ.t()],
          modulus_coef: [integer()]
        }
@spec new([integer()], [integer()], keyword()) :: t() | no_return
def new(coef, modulus_coef, params \\ []) do
modulus = Keyword.get(params, :modulus, FQ.default_modulus())
coef_size = Enum.count(coef)
modulus_coef_size = Enum.count(modulus_coef)
if coef_size != modulus_coef_size,
do:
raise(ArgumentError,
message: "Coefficients and modulus coefficients have different dimensions"
)
fq_coef =
Enum.map(coef, fn coef_el ->
FQ.new(coef_el, modulus: modulus)
end)
%__MODULE__{
coef: fq_coef,
modulus_coef: modulus_coef,
dim: coef_size
}
end
@spec add(t(), t()) :: t() | no_return
def add(
fqp1 = %__MODULE__{dim: dim1, modulus_coef: modulus_coef1},
fqp2 = %__MODULE__{dim: dim2, modulus_coef: modulus_coef2}
)
when dim1 == dim2 and modulus_coef1 == modulus_coef2 do
coef =
fqp1.coef
|> Enum.zip(fqp2.coef)
|> Enum.map(fn {coef1, coef2} ->
FQ.add(coef1, coef2)
end)
%__MODULE__{modulus_coef: modulus_coef1, dim: dim1, coef: coef}
end
def add(_, _), do: raise(ArgumentError, message: "Can't add elements of different fields")
@spec sub(t(), t()) :: t() | no_return
def sub(
fqp1 = %__MODULE__{dim: dim1, modulus_coef: modulus_coef1},
fqp2 = %__MODULE__{dim: dim2, modulus_coef: modulus_coef2}
)
when dim1 == dim2 and modulus_coef1 == modulus_coef2 do
coef =
fqp1.coef
|> Enum.zip(fqp2.coef)
|> Enum.map(fn {coef1, coef2} ->
FQ.sub(coef1, coef2)
end)
%__MODULE__{modulus_coef: modulus_coef1, dim: dim1, coef: coef}
end
def sub(_, _), do: raise(ArgumentError, message: "Can't substact elements of different fields")
  @spec mult(t(), t() | FQ.t() | integer()) :: t() | no_return
  # Scalar multiplication by a single FQ element: scales every coefficient.
  def mult(
        fqp = %__MODULE__{dim: dim, modulus_coef: modulus_coef},
        fq = %FQ{}
      ) do
    coef =
      Enum.map(fqp.coef, fn coef ->
        FQ.mult(coef, fq)
      end)
    %__MODULE__{modulus_coef: modulus_coef, dim: dim, coef: coef}
  end
  # Scalar multiplication by a plain integer.
  def mult(
        fqp = %__MODULE__{dim: dim, modulus_coef: modulus_coef},
        number
      )
      when is_integer(number) do
    coef =
      Enum.map(fqp.coef, fn coef ->
        FQ.mult(coef, number)
      end)
    %__MODULE__{modulus_coef: modulus_coef, dim: dim, coef: coef}
  end
  # Polynomial multiplication of two elements of the same field:
  # 1. convolve the coefficient vectors into a product of 2*dim - 1
  #    coefficients (sum of fqp1[i] * fqp2[j] accumulated at index i + j);
  # 2. reduce that product modulo the field's modulus polynomial via
  #    mult_modulus_coef/3 (which expects the list reversed), yielding
  #    `dim` coefficients again.
  def mult(
        fqp1 = %__MODULE__{dim: dim1, modulus_coef: modulus_coef1},
        fqp2 = %__MODULE__{dim: dim2, modulus_coef: modulus_coef2}
      )
      when dim1 == dim2 and modulus_coef1 == modulus_coef2 do
    pol_coef = List.duplicate(FQ.new(0), dim1 * 2 - 1)
    intermediate_result =
      Enum.reduce(0..(dim1 - 1), pol_coef, fn i, acc1 ->
        Enum.reduce(0..(dim1 - 1), acc1, fn j, acc2 ->
          cur_acc = Enum.at(acc2, i + j)
          summand = FQ.mult(Enum.at(fqp1.coef, i), Enum.at(fqp2.coef, j))
          List.replace_at(acc2, i + j, FQ.add(cur_acc, summand))
        end)
      end)
    coef =
      mult_modulus_coef(
        Enum.reverse(intermediate_result),
        modulus_coef1,
        dim1
      )
    %__MODULE__{modulus_coef: modulus_coef1, dim: dim1, coef: coef}
  end
  def mult(_, _), do: raise(ArgumentError, message: "Can't multiply elements of different fields")
@spec divide(t(), t()) :: t() | no_return
def divide(fqp1, fqp2) do
inverse = inverse(fqp2)
mult(fqp1, inverse)
end
  @spec inverse(t()) :: t() | no_return
  # Multiplicative inverse via the extended Euclidean algorithm over
  # polynomials. `lm`/`hm` accumulate the Bezout coefficients while
  # `low`/`high` (the element and the monic modulus polynomial, padded to
  # dim + 1 entries) are reduced in calculate_inverse/4.
  def inverse(fqp) do
    lm = [FQ.new(1)] ++ List.duplicate(FQ.new(0), fqp.dim)
    hm = List.duplicate(FQ.new(0), fqp.dim + 1)
    low = fqp.coef ++ [FQ.new(0)]
    # Leading 1 makes `high` the monic modulus polynomial (integer entries
    # are normalized through FQ.new/1 downstream).
    high = fqp.modulus_coef ++ [1]
    deg_low = deg(low)
    calculate_inverse({high, low}, {hm, lm}, fqp, deg_low)
  end
@spec pow(t(), integer()) :: t() | no_return
def pow(base, exp) do
cond do
exp == 0 ->
coef = [1] ++ List.duplicate([0], base.dim - 1)
new(coef, base.modulus_coef)
exp == 1 ->
base
rem(exp, 2) == 0 ->
base
|> mult(base)
|> pow(div(exp, 2))
true ->
base
|> mult(base)
|> pow(div(exp, 2))
|> mult(base)
end
end
@spec zero?(t()) :: boolean()
def zero?(fqp) do
Enum.all?(fqp.coef, fn cur_coef ->
cur_coef.value == 0
end)
end
@spec negate(t()) :: t()
def negate(fqp) do
neg_coef = Enum.map(fqp.coef, fn coef -> FQ.new(-coef.value) end)
%{fqp | coef: neg_coef}
end
  # Extended-Euclid base case: `low` has degree 0, so `lm` holds the
  # Bezout coefficients of the inverse. Normalize by the remaining
  # constant and keep the first `dim` entries as the result.
  defp calculate_inverse({_, low}, {_, lm}, fqp, deg_low) when deg_low == 0 do
    coef =
      lm
      |> Enum.take(fqp.dim)
      |> Enum.map(fn el ->
        FQ.divide(el, Enum.at(low, 0))
      end)
    new(coef, fqp.modulus_coef)
  end
  # Recursive step: compute the rounded quotient r = high / low, subtract
  # r * low from high (and r * lm from hm) position by position, then
  # recurse with the pairs swapped so deg(low) strictly decreases.
  # NOTE: the `FQ.new()` pipes normalize entries that may still be plain
  # integers (e.g. the trailing 1 appended to the modulus polynomial).
  defp calculate_inverse({high, low}, {hm, lm}, fqp, _deg_low) do
    r = poly_rounded_div(high, low)
    # Pad the quotient with zeros up to dim + 1 coefficients.
    r = r ++ List.duplicate(FQ.new(0), fqp.dim + 1 - Enum.count(r))
    nm = hm
    new = high
    {nm, new} =
      0..fqp.dim
      |> Enum.reduce({nm, new}, fn i, {nm, new} ->
        0..(fqp.dim - i)
        |> Enum.reduce({nm, new}, fn j, {nm, new} ->
          nmmult = lm |> Enum.at(i) |> FQ.new() |> FQ.mult(Enum.at(r, j))
          new_nm_val = nm |> Enum.at(i + j) |> FQ.new() |> FQ.sub(nmmult)
          nm = List.replace_at(nm, i + j, new_nm_val)
          newmult = low |> Enum.at(i) |> FQ.new() |> FQ.mult(Enum.at(r, j))
          new_val = new |> Enum.at(i + j) |> FQ.new() |> FQ.sub(newmult)
          new = List.replace_at(new, i + j, new_val)
          {nm, new}
        end)
      end)
    deg_low = deg(new)
    calculate_inverse({low, new}, {lm, nm}, fqp, deg_low)
  end
  # Rounded polynomial division of `a` by `b` (little-endian coefficient
  # lists): classic long division from the highest degree down, returning
  # the quotient trimmed to its actual degree. Mirrors py_ecc's
  # poly_rounded_div used by the extended-Euclid inverse.
  defp poly_rounded_div(a, b) do
    dega = deg(a)
    degb = deg(b)
    temp = a
    output = List.duplicate(FQ.new(0), Enum.count(a))
    output =
      # Only divide when deg(a) >= deg(b); otherwise the quotient is 0.
      if dega - degb >= 0 do
        {output, _} =
          0..(dega - degb)
          |> Enum.to_list()
          |> Enum.reverse()
          |> Enum.reduce({output, temp}, fn i, {out_acc, temp_acc} ->
            # Quotient coefficient at position i: leading term of the
            # remainder divided by the leading coefficient of b.
            new_val =
              temp_acc
              |> Enum.at(degb + i)
              |> FQ.new()
              |> FQ.divide(Enum.at(b, degb))
              |> FQ.add(Enum.at(out_acc, i))
            new_out_acc = List.replace_at(out_acc, i, new_val)
            # Subtract the scaled divisor from the running remainder.
            new_temp_acc =
              0..degb
              |> Enum.reduce(temp_acc, fn j, acc ->
                updated_value =
                  acc |> Enum.at(i + j) |> FQ.new() |> FQ.sub(Enum.at(new_out_acc, j))
                List.replace_at(
                  acc,
                  i + j,
                  updated_value
                )
              end)
            {new_out_acc, new_temp_acc}
          end)
        output
      else
        output
      end
    # Trim trailing zero coefficients down to the quotient's true degree.
    dego = deg(output)
    Enum.take(output, dego + 1)
  end
defp deg(list) do
idx =
list
|> Enum.reverse()
|> Enum.find_index(fn el ->
if is_integer(el) do
el != 0
else
el.value != 0
end
end)
if is_nil(idx), do: 0, else: Enum.count(list) - idx - 1
end
  # Reduces a product-coefficient list (passed in reversed, highest degree
  # first) modulo the field's modulus polynomial. While more than `dim`
  # coefficients remain, fold the leading coefficient `cur` into the
  # lower-order terms by subtracting cur * modulus_coef[i] at the
  # corresponding positions, then recurse on the shortened list.
  defp mult_modulus_coef(pol_coef = [cur | tail_pol_coef], modulus_coef, dim)
       when length(pol_coef) > dim do
    current_idx = Enum.count(pol_coef) - dim - 1
    # Work on the tail in little-endian order for index arithmetic.
    tail_pol_coef = Enum.reverse(tail_pol_coef)
    cur_result =
      Enum.reduce(0..(dim - 1), tail_pol_coef, fn i, acc ->
        current_acc_el = acc |> Enum.at(i + current_idx)
        subtrahend = modulus_coef |> Enum.at(i) |> FQ.new() |> FQ.mult(cur)
        updated_acc_el = FQ.sub(current_acc_el, subtrahend)
        List.replace_at(acc, current_idx + i, updated_acc_el)
      end)
    cur_result
    |> Enum.reverse()
    |> mult_modulus_coef(modulus_coef, dim)
  end
  # Base case: `dim` coefficients left — restore little-endian order.
  defp mult_modulus_coef(pol_coef, _, _), do: Enum.reverse(pol_coef)
end
| 25.79288 | 98 | 0.547553 |
ff6443537e1bef2815c4da826719cf2e0f926d87 | 5,234 | ex | Elixir | lib/spandex_tesla.ex | leodeoliveirasilva/spandex_tesla | 6b5029996696542ba24c5a0c97c0bb0a2d2a077f | [
"Apache-2.0"
] | null | null | null | lib/spandex_tesla.ex | leodeoliveirasilva/spandex_tesla | 6b5029996696542ba24c5a0c97c0bb0a2d2a077f | [
"Apache-2.0"
] | null | null | null | lib/spandex_tesla.ex | leodeoliveirasilva/spandex_tesla | 6b5029996696542ba24c5a0c97c0bb0a2d2a077f | [
"Apache-2.0"
] | null | null | null | defmodule SpandexTesla do
@external_resource "README.md"
@moduledoc "README.md"
|> File.read!()
|> String.split("<!-- MDOC !-->")
|> Enum.fetch!(1)
  defmodule Error do
    @moduledoc """
    Struct used to identify the errors.
    """
    # Exception reported to the tracer for failed or errored Tesla
    # requests (see the span_error calls below).
    defexception [:message]
  end
  @doc """
  Telemetry handler. Attach it to the telemetry tesla events in order to trace the tesla calls.
  """
  # [:tesla, :request, :start] — opens a child span only when a trace is
  # already active, and mirrors the trace/span ids into Logger metadata so
  # log lines can be correlated with the span.
  def handle_event([:tesla, :request, :start], _measurements, _metadata, _config) do
    if tracer().current_trace_id([]) do
      tracer().start_span("request", [])
      Logger.metadata(
        trace_id: to_string(tracer().current_trace_id([])),
        span_id: to_string(tracer().current_span_id([]))
      )
    end
  end
  # [:tesla, :request, :stop] carrying an `:error` key — the request
  # failed before producing a status; record the span as errored (with a
  # nil status code) and close it.
  # NOTE(review): unlike the success clause below, this clause does not
  # check `tracer().current_trace_id([])` first — it presumably relies on
  # the :start handler having opened a span; confirm behavior when no
  # trace is active.
  def handle_event(
        [:tesla, :request, :stop],
        measurements,
        %{error: error, env: env} = metadata,
        config
      ) do
    # NOTE(review): `now`/`duration` are used without unit conversion
    # here, while the legacy handler below converts to nanoseconds —
    # confirm the time unit expected by the tracer.
    now = clock_adapter().system_time()
    %{duration: duration} = measurements
    %{url: url, method: method} = env
    trace_opts =
      format_trace_options(
        %{duration: duration, status: nil, method: method, now: now, url: url},
        metadata,
        config || []
      )
    tracer().span_error(
      %Error{message: error},
      nil,
      trace_opts
    )
    tracer().finish_span([])
  end
  # [:tesla, :request, :stop] success path — updates the current span with
  # timing/HTTP info; non-2xx statuses are recorded as span errors.
  def handle_event([:tesla, :request, :stop], measurements, metadata, config) do
    if tracer().current_trace_id([]) do
      now = clock_adapter().system_time()
      %{duration: duration} = measurements
      %{status: status, url: url, method: method} = metadata[:env]
      trace_opts =
        format_trace_options(
          %{duration: duration, method: method, now: now, status: status, url: url},
          metadata,
          config || []
        )
      case status do
        x when x not in 200..299 ->
          tracer().span_error(
            %Error{message: "Request has failed with status response #{status}"},
            nil,
            trace_opts
          )
        _ ->
          tracer().update_span(trace_opts)
      end
      tracer().finish_span([])
    end
  end
  # [:tesla, :request, :exception] — marks the span as errored with the
  # raised reason and closes it.
  # NOTE(review): Logger metadata is refreshed *after* finish_span, so the
  # recorded span_id may refer to the parent span — confirm intent.
  def handle_event([:tesla, :request, :exception], _measurements, metadata, _config) do
    if tracer().current_trace_id([]) do
      reason = metadata[:reason] || metadata[:error]
      tracer().span_error(%Error{message: inspect(reason)}, nil, [])
      tracer().finish_span([])
      Logger.metadata(
        trace_id: to_string(tracer().current_trace_id([])),
        span_id: to_string(tracer().current_span_id([]))
      )
    end
  end
  # Legacy single [:tesla, :request] event (older tesla telemetry shape):
  # starts and finishes the span in one go. Note this clause *does*
  # convert timestamps to nanoseconds.
  def handle_event([:tesla, :request], measurements, metadata, config) do
    if tracer().current_trace_id([]) do
      now = clock_adapter().system_time() |> System.convert_time_unit(:native, :nanosecond)
      %{request_time: request_time} = measurements
      %{result: result} = metadata
      tracer().start_span("request", [])
      Logger.metadata(
        trace_id: to_string(tracer().current_trace_id([])),
        span_id: to_string(tracer().current_span_id([]))
      )
      span_result(result, %{request_time: request_time, now: now}, metadata, config || [])
      tracer().finish_span([])
    end
  end
  # Success result of a legacy-event request: update the span with timing
  # (request_time arrives in microseconds and is converted to nanoseconds)
  # and HTTP metadata.
  defp span_result({:ok, request}, measurements, metadata, config) do
    %{request_time: request_time, now: now} = measurements
    %{status: status, url: url, method: method} = request
    duration = System.convert_time_unit(request_time, :microsecond, :nanosecond)
    trace_opts =
      format_trace_options(
        %{duration: duration, method: method, now: now, status: status, url: url},
        metadata,
        config
      )
    tracer().update_span(trace_opts)
  end
  # Failure result: record the reason as a span error.
  defp span_result({:error, reason}, _measurements, _metadata, _config) do
    tracer().span_error(%Error{message: inspect(reason)}, nil, [])
  end
defp format_trace_options(
%{duration: duration, method: method, now: now, status: status, url: url},
metadata,
config
) do
upcased_method = method |> to_string() |> String.upcase()
[
start: now - duration,
completion_time: now,
service: service(),
resource: resource_name(metadata, config),
type: :web,
http: [
url: url,
status_code: status,
method: upcased_method
]
]
end
defp resource_name(metadata, config) do
get_resource_name = Keyword.get(config, :resource, &default_resource_name/1)
get_resource_name.(metadata)
end
defp default_resource_name(%{env: %{url: url, method: method, opts: opts}}) do
upcased_method = method |> to_string() |> String.upcase()
resource_url = Keyword.get(opts, :req_url, url)
"#{upcased_method} #{resource_url}"
end
defp default_resource_name(%{result: {:ok, %{method: method, url: url, opts: opts}}}) do
upcased_method = method |> to_string() |> String.upcase()
resource_url = Keyword.get(opts, :req_url, url)
"#{upcased_method} #{resource_url}"
end
defp tracer do
Application.fetch_env!(:spandex_tesla, :tracer)
end
defp service do
Application.get_env(:spandex_tesla, :service, :tesla)
end
defp clock_adapter do
Application.get_env(:spandex_tesla, :clock_adapter, System)
end
end
| 27.547368 | 95 | 0.617692 |
ff64499d7e77d82cdae0eb1111646c21f4ab1f26 | 880 | ex | Elixir | clients/logging/lib/google_api/logging/v2/metadata.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/logging/lib/google_api/logging/v2/metadata.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/logging/lib/google_api/logging/v2/metadata.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Logging.V2 do
  @moduledoc """
  API client metadata for GoogleApi.Logging.V2.
  """
  # Revision of the Google discovery document this client was generated
  # from (file is auto-generated; do not edit manually).
  @discovery_revision "20211130"
  def discovery_revision(), do: @discovery_revision
end
| 32.592593 | 74 | 0.757955 |
ff646d1d8d1946fbba3c4977f2d60bcaeba22ea2 | 1,696 | exs | Elixir | test/bintree_test.exs | wmean-spec/bintree | 881dca96437ea67bf588587f0f924cd9c9efcdbe | [
"MIT"
] | null | null | null | test/bintree_test.exs | wmean-spec/bintree | 881dca96437ea67bf588587f0f924cd9c9efcdbe | [
"MIT"
] | null | null | null | test/bintree_test.exs | wmean-spec/bintree | 881dca96437ea67bf588587f0f924cd9c9efcdbe | [
"MIT"
] | null | null | null | defmodule BintreeTest do
  use ExUnit.Case
  doctest Bintree
  # Covers the four `Bintree.new` constructor arities plus insert/filter.
  describe "creating bintree" do
    test "using value" do
      assert Bintree.new(:some_value) == %Bintree{value: :some_value}
    end
    test "using three values" do
      assert Bintree.new(1, 2, 3) == %Bintree{
               value: 1,
               left: %Bintree{value: 2},
               right: %Bintree{value: 3}
             }
    end
    test "using other bintrees" do
      left = Bintree.new(2)
      right = Bintree.new(3)
      assert Bintree.new(1, left, right) == %Bintree{
               value: 1,
               left: %Bintree{value: 2},
               right: %Bintree{value: 3}
             }
    end
    # Generator stops once the predicate returns false for a value.
    test "using while-generator" do
      two = %Bintree{value: 2}
      assert Bintree.new(1, &(&1 + 1), &(&1 + 1), &(&1 < 3)) == %Bintree{
               value: 1,
               left: two,
               right: two
             }
    end
    # Integer last argument limits tree depth instead of using a predicate.
    test "using depth-generator" do
      two = %Bintree{value: 2, left: nil, right: nil}
      assert Bintree.new(1, &(&1 + 1), &(&1 + 1), 2) == %Bintree{value: 1, left: two, right: two}
    end
  end
  test "insert new value" do
    tree =
      Bintree.new(123)
      |> Bintree.insert([:left], 6)
    assert tree == %Bintree{value: 123, left: Bintree.new(6)}
  end
  test "update value" do
    tree =
      Bintree.new(123)
      |> Bintree.insert([:left], 6)
      |> Bintree.insert([:left], 5)
    assert tree == %Bintree{value: 123, left: Bintree.new(5)}
  end
  # Filtering prunes the subtree rooted at any value failing the predicate.
  test "filter bintree" do
    tree =
      Bintree.new(1, &(&1 + 1), &(&1 + 2), 2)
      |> Bintree.filter(&(&1 != 3))
    assert tree == %Bintree{value: 1, left: Bintree.new(2)}
  end
end
| 24.228571 | 97 | 0.519458 |
ff648b4b87ad46b48b9f8eeef93cdd62707a8c4d | 1,971 | exs | Elixir | test/bandit/initial_handler_test.exs | mtrudel/bandit | 9b1e0f66637a16fb18921bb29b6b577938b4eb07 | [
"MIT"
] | 226 | 2020-05-18T09:36:32.000Z | 2022-03-30T00:25:51.000Z | test/bandit/initial_handler_test.exs | mtrudel/bandit | 9b1e0f66637a16fb18921bb29b6b577938b4eb07 | [
"MIT"
] | 11 | 2021-10-11T13:48:24.000Z | 2022-03-05T20:18:11.000Z | test/bandit/initial_handler_test.exs | mtrudel/bandit | 9b1e0f66637a16fb18921bb29b6b577938b4eb07 | [
"MIT"
] | 7 | 2020-05-18T09:36:35.000Z | 2022-02-08T11:12:16.000Z | defmodule InitialHandlerTest do
  use ExUnit.Case, async: true
  use ServerHelpers
  use FinchHelpers
  # Plug handler used by the tests: replies with "<protocol> <scheme>"
  # so each test can assert which handler/transport was negotiated.
  def report_version(conn) do
    body = "#{get_http_protocol(conn)} #{conn.scheme}"
    send_resp(conn, 200, body)
  end
  describe "HTTP/1.x handling over TCP" do
    setup :http_server
    setup :finch_http1_client
    test "sets up the HTTP 1.x handler", %{base: base, finch_name: finch_name} do
      {:ok, response} = Finch.build(:get, base <> "/report_version") |> Finch.request(finch_name)
      assert response.status == 200
      assert response.body == "HTTP/1.1 http"
    end
  end
  describe "HTTP/1.x handling over SSL" do
    setup :https_server
    setup :finch_http1_client
    test "sets up the HTTP 1.x handler", %{base: base, finch_name: finch_name} do
      {:ok, response} = Finch.build(:get, base <> "/report_version") |> Finch.request(finch_name)
      assert response.status == 200
      assert response.body == "HTTP/1.1 https"
    end
    # Plaintext HTTP/1.1 over a connection that negotiated h2 via ALPN
    # must be rejected by closing the connection.
    test "closes with an error if HTTP/1.1 is attempted over an h2 ALPN connection", context do
      socket = SimpleH2Client.tls_client(context)
      :ssl.send(socket, "GET / HTTP/1.1\r\n")
      assert :ssl.recv(socket, 0) == {:error, :closed}
    end
  end
  describe "HTTP/2 handling over TCP" do
    setup :http_server
    setup :finch_h2_client
    test "sets up the HTTP/2 handler", %{base: base, finch_name: finch_name} do
      {:ok, response} = Finch.build(:get, base <> "/report_version") |> Finch.request(finch_name)
      assert response.status == 200
      assert response.body == "HTTP/2 http"
    end
  end
  describe "HTTP/2 handling over SSL" do
    setup :https_server
    setup :finch_h2_client
    test "sets up the HTTP/2 handler", %{base: base, finch_name: finch_name} do
      {:ok, response} = Finch.build(:get, base <> "/report_version") |> Finch.request(finch_name)
      assert response.status == 200
      assert response.body == "HTTP/2 https"
    end
  end
end
| 30.323077 | 97 | 0.663623 |
ff64900adc83e7fa1aa2764b4965448276fb057c | 3,336 | exs | Elixir | test/currencyconverter/transactions_test.exs | Aguiar575/Currency-ConvertEX | 886181476c49bd068df5450ab98dcb885ac55746 | [
"MIT"
] | 1 | 2021-12-27T01:29:58.000Z | 2021-12-27T01:29:58.000Z | test/currencyconverter/transactions_test.exs | Aguiar575/Currency-ConvertEX | 886181476c49bd068df5450ab98dcb885ac55746 | [
"MIT"
] | null | null | null | test/currencyconverter/transactions_test.exs | Aguiar575/Currency-ConvertEX | 886181476c49bd068df5450ab98dcb885ac55746 | [
"MIT"
] | null | null | null | defmodule Currencyconverter.TransactionTest do
  use Currencyconverter.DataCase
  alias Currencyconverter.Transaction
  # CRUD coverage for the Transaction context, using the fixture helper
  # to create persisted records.
  describe "transactions" do
    alias Currencyconverter.Transaction.Transactions
    import Currencyconverter.TransactionFixtures
    # All-nil attrs used to exercise changeset validation failures.
    @invalid_attrs %{
      conversion_rate: nil,
      destination_currency: nil,
      origin_currency: nil,
      origin_currency_value: nil,
      user_id: nil
    }
    test "list_transactions/0 returns all transactions" do
      transactions = transactions_fixture()
      assert Transaction.list_transactions() == [transactions]
    end
    test "get_transactions!/1 returns the transactions with given id" do
      transactions = transactions_fixture()
      assert Transaction.get_transactions!(transactions.id) == transactions
    end
    test "create_transactions/1 with valid data creates a transactions" do
      valid_attrs = %{
        conversion_rate: "42",
        destination_currency: "some destination_currency",
        origin_currency: "some origin_currency",
        origin_currency_value: "42",
        user_id: "42"
      }
      assert {:ok, %Transactions{} = transactions} = Transaction.create_transactions(valid_attrs)
      assert transactions.conversion_rate == "42"
      assert transactions.destination_currency == "some destination_currency"
      assert transactions.origin_currency == "some origin_currency"
      assert transactions.origin_currency_value == "42"
      assert transactions.user_id == "42"
    end
    test "create_transactions/1 with invalid data returns error changeset" do
      assert {:error, %Ecto.Changeset{}} = Transaction.create_transactions(@invalid_attrs)
    end
    test "update_transactions/2 with valid data updates the transactions" do
      transactions = transactions_fixture()
      update_attrs = %{
        conversion_rate: "43",
        destination_currency: "some updated destination_currency",
        origin_currency: "some updated origin_currency",
        origin_currency_value: "43",
        user_id: "43"
      }
      assert {:ok, %Transactions{} = transactions} =
               Transaction.update_transactions(transactions, update_attrs)
      assert transactions.conversion_rate == "43"
      assert transactions.destination_currency == "some updated destination_currency"
      assert transactions.origin_currency == "some updated origin_currency"
      assert transactions.origin_currency_value == "43"
      assert transactions.user_id == "43"
    end
    # A failed update must leave the stored record unchanged.
    test "update_transactions/2 with invalid data returns error changeset" do
      transactions = transactions_fixture()
      assert {:error, %Ecto.Changeset{}} =
               Transaction.update_transactions(transactions, @invalid_attrs)
      assert transactions == Transaction.get_transactions!(transactions.id)
    end
    test "delete_transactions/1 deletes the transactions" do
      transactions = transactions_fixture()
      assert {:ok, %Transactions{}} = Transaction.delete_transactions(transactions)
      assert_raise Ecto.NoResultsError, fn -> Transaction.get_transactions!(transactions.id) end
    end
    test "change_transactions/1 returns a transactions changeset" do
      transactions = transactions_fixture()
      assert %Ecto.Changeset{} = Transaction.change_transactions(transactions)
    end
  end
end
| 36.26087 | 97 | 0.718525 |
ff64ab8b5f30d5514dd969618a99b438ddf07540 | 18,999 | ex | Elixir | lib/mix/lib/mix.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | 1 | 2021-07-01T17:58:37.000Z | 2021-07-01T19:05:37.000Z | lib/mix/lib/mix.ex | mrcasals/elixir | 8cb9ebf708f2789a0e7dbf574294b82a55dd2f21 | [
"Apache-2.0"
] | null | null | null | defmodule Mix do
@moduledoc ~S"""
Mix is a build tool that provides tasks for creating, compiling,
and testing Elixir projects, managing its dependencies, and more.
## Mix.Project
The foundation of Mix is a project. A project can be defined by using
`Mix.Project` in a module, usually placed in a file named `mix.exs`:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0"
]
end
end
See the `Mix.Project` module for detailed documentation on Mix projects.
Once the project is defined, a number of default Mix tasks can be run
directly from the command line:
* `mix compile` - compiles the current project
* `mix test` - runs tests for the given project
* `mix run` - runs a particular command inside the project
Each task has its own options and sometimes specific configuration
to be defined in the `project/0` function. You can use `mix help`
to list all available tasks and `mix help NAME` to show help for
a particular task.
The best way to get started with your first project is by calling
`mix new my_project` from the command line.
## Mix.Task
Tasks are what make Mix extensible.
Projects can extend Mix behaviour by adding their own tasks. For
example, adding the task below inside your project will
make it available to everyone that uses your project:
defmodule Mix.Tasks.Hello do
use Mix.Task
def run(_) do
Mix.shell().info("Hello world")
end
end
The task can now be invoked with `mix hello`.
See the `Mix.Task` behaviour for detailed documentation on Mix tasks.
## Dependencies
Mix also manages your dependencies and integrates nicely with the [Hex package
manager](https://hex.pm).
In order to use dependencies, you need to add a `:deps` key
to your project configuration. We often extract the list of dependencies
into its own function:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
deps: deps()
]
end
defp deps do
[
{:ecto, "~> 2.0"},
{:plug, github: "elixir-lang/plug"}
]
end
end
You can run `mix help deps` to learn more about dependencies in Mix.
## Environments
Mix supports different environments. Environments allow developers
to prepare and organize their project specifically for different
scenarios. By default, Mix provides three environments:
* `:dev` - the default environment
* `:test` - the environment `mix test` runs on
* `:prod` - the environment your dependencies run on
The environment can be changed via the command line by setting
the `MIX_ENV` environment variable, for example:
$ MIX_ENV=prod mix run server.exs
You can also specify that certain dependencies are available only for
certain environments:
{:some_test_dependency, "~> 1.0", only: :test}
The environment can be read via `Mix.env/0`.
## Targets
Besides environments, Mix supports targets. Targets are useful when a
project needs to compile to different architectures and some of the
dependencies are only available to some of them. By default, the target
is `:host` but it can be set via the `MIX_TARGET` environment variable.
The target can be read via `Mix.target/0`.
## Aliases
Aliases are shortcuts or tasks specific to the current project.
In the [Mix.Task section](#module-mix-task), we have defined a task that would be
available to everyone using our project as a dependency. What if
we wanted the task to only be available for our project? Just
define an alias:
defmodule MyApp.MixProject do
use Mix.Project
def project do
[
app: :my_app,
version: "1.0.0",
aliases: aliases()
]
end
defp aliases do
[
c: "compile",
hello: &hello/1
]
end
defp hello(_) do
Mix.shell().info("Hello world")
end
end
In the example above, we have defined two aliases. One is `mix c`
which is a shortcut for `mix compile`. The other is named
`mix hello`, which is the equivalent to the `Mix.Tasks.Hello`
we have defined in the [Mix.Task section](#module-mix-task).
Aliases may also be lists, specifying multiple tasks to be run
consecutively:
[all: [&hello/1, "deps.get --only #{Mix.env()}", "compile"]]
In the example above, we have defined an alias named `mix all`,
that prints "Hello world", then fetches dependencies specific
to the current environment, and compiles the project.
Aliases can also be used to augment existing tasks. Let's suppose
you want to augment `mix clean` to clean another directory Mix does
not know about:
[clean: ["clean", &clean_extra/1]]
Where `&clean_extra/1` would be a function in your `mix.exs`
with extra cleanup logic.
Arguments given to the alias will be appended to the arguments of
the last task in the list. Except when overriding an existing task.
In this case, the arguments will be given to the original task,
in order to preserve semantics. For example, in the `:clean` alias
above, the arguments given to the alias will be passed to "clean"
and not to `clean_extra/1`.
Aliases defined in the current project do not affect its dependencies
and aliases defined in dependencies are not accessible from the
current project.
Aliases can be used very powerfully to also run Elixir scripts and
shell commands, for example:
# priv/hello1.exs
IO.puts("Hello One")
# priv/hello2.exs
IO.puts("Hello Two")
# priv/world.sh
#!/bin/sh
echo "world!"
# mix.exs
defp aliases do
[
some_alias: ["hex.info", "run priv/hello1.exs", "cmd priv/world.sh"]
]
end
In the example above we have created the alias `some_alias` that will
run the task `mix hex.info`, then `mix run` to run an Elixir script,
then `mix cmd` to execute a command line shell script. This shows how
powerful aliases mixed with Mix tasks can be.
Mix tasks are designed to run only once. This prevents the same task
from being executed multiple times. For example, if there are several tasks
depending on `mix compile`, the code will be compiled once. Tasks can
be executed again if they are explicitly reenabled using `Mix.Task.reenable/1`:
another_alias: [
"format --check-formatted priv/hello1.exs",
"cmd priv/world.sh",
fn _ -> Mix.Task.reenable("format") end,
"format --check-formatted priv/hello2.exs"
]
Some tasks are automatically reenabled though, as they are expected to
be invoked multiple times. They are: `mix cmd`, `mix do`, `mix loadconfig`,
`mix profile.cprof`, `mix profile.eprof`, `mix profile.fprof`, `mix run`,
and `mix xref`.
It is worth mentioning that some tasks, such as in the case of the
`mix format` command in the example above, can accept multiple files so it
could be rewritten as:
another_alias: ["format --check-formatted priv/hello1.exs priv/hello2.exs"]
## Environment variables
Several environment variables can be used to modify Mix's behaviour.
Mix responds to the following variables:
* `MIX_ARCHIVES` - specifies the directory into which the archives should be installed
(default: `~/.mix/archives`)
* `MIX_BUILD_ROOT` - sets the root directory where build artifacts
should be written to. For example, "_build". If `MIX_BUILD_PATH` is set, this
option is ignored.
* `MIX_BUILD_PATH` - sets the project `Mix.Project.build_path/0` config. This option
must always point to a subdirectory inside a temporary directory. For instance,
never "/tmp" or "_build" but "_build/PROD" or "/tmp/PROD", as required by Mix
* `MIX_DEPS_PATH` - sets the project `Mix.Project.deps_path/0` config for the current project (default: `deps`)
* `MIX_DEBUG` - outputs debug information about each task before running it
* `MIX_ENV` - specifies which environment should be used. See [Environments](#module-environments)
* `MIX_TARGET` - specifies which target should be used. See [Targets](#module-targets)
* `MIX_EXS` - changes the full path to the `mix.exs` file
* `MIX_HOME` - path to Mix's home directory, stores configuration files and scripts used by Mix
(default: `~/.mix`)
* `MIX_INSTALL_DIR` - (since v1.12.0) specifies directory where `Mix.install/2` keeps
installs cache
* `MIX_PATH` - appends extra code paths
* `MIX_QUIET` - does not print information messages to the terminal
* `MIX_REBAR` - path to rebar command that overrides the one Mix installs
(default: `~/.mix/rebar`)
* `MIX_REBAR3` - path to rebar3 command that overrides the one Mix installs
(default: `~/.mix/rebar3`)
* `MIX_XDG` - asks Mix to follow the [XDG Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html)
for its home directory and configuration files. This behaviour needs to
be opt-in due to backwards compatibility. `MIX_HOME` has higher preference
than `MIX_XDG`. If none of the variables are set, the default directory
`~/.mix` will be used
Environment variables that are not meant to hold a value (and act basically as
flags) should be set to either `1` or `true`, for example:
$ MIX_DEBUG=1 mix compile
"""
use Application
import Kernel, except: [raise: 2]
  @doc false
  # Convenience boot used outside the normal OTP application lifecycle:
  # starts the :mix application and its dependencies.
  def start do
    {:ok, _} = Application.ensure_all_started(:mix)
    :ok
  end
  @doc false
  # Application callback: supervises Mix's three state servers. With
  # max_restarts: 0 a crashing child terminates the supervisor instead of
  # restarting with possibly inconsistent global state.
  def start(_type, []) do
    children = [Mix.State, Mix.TasksServer, Mix.ProjectStack]
    opts = [strategy: :one_for_one, name: Mix.Supervisor, max_restarts: 0]
    Supervisor.start_link(children, opts)
  end
  @doc """
  Returns the current Mix environment.

  This function should not be used at runtime in application code (as opposed
  to infrastructure and build code like Mix tasks). Mix is a build tool and may
  not be available after the code is compiled (for example in a release).

  To differentiate the program behavior depending on the environment, it is
  recommended to use application environment through `Application.get_env/3`.
  Proper configuration can be set in config files, often per-environment
  (see the `Config` module for more information).
  """
  @spec env() :: atom()
  def env do
    # env is not available on bootstrapping, so set a :dev default
    Mix.State.get(:env, :dev)
  end
  @doc """
  Changes the current Mix environment to `env`.

  Be careful when invoking this function as any project
  configuration won't be reloaded.

  This function should not be used at runtime in application code
  (see `env/0` for more information).
  """
  @spec env(atom()) :: :ok
  def env(env) when is_atom(env) do
    # Stored in Mix.State so it survives across tasks within the same VM.
    Mix.State.put(:env, env)
  end
  @doc """
  Returns the Mix target.
  """
  @spec target() :: atom()
  def target do
    # target is not available on bootstrapping, so set a :host default
    Mix.State.get(:target, :host)
  end
  @doc """
  Changes the current Mix target to `target`.

  Be careful when invoking this function as any project
  configuration won't be reloaded.
  """
  @spec target(atom()) :: :ok
  def target(target) when is_atom(target) do
    Mix.State.put(:target, target)
  end
  @doc """
  Returns the default compilers used by Mix.

  It can be used in your `mix.exs` to prepend or
  append new compilers to Mix:

      def project do
        [compilers: Mix.compilers() ++ [:foo, :bar]]
      end

  """
  @spec compilers() :: [atom()]
  def compilers do
    # Order matters: Erlang-level compilers (:yecc, :leex, :erlang) run before
    # :elixir, and :app (the .app file generator) always runs last.
    [:yecc, :leex, :erlang, :elixir, :app]
  end
  @doc """
  Returns the current shell.

  `shell/0` can be used as a wrapper for the current shell. It contains
  conveniences for requesting information from the user, printing to the
  shell and so forth. The Mix shell is swappable (see `shell/1`), allowing
  developers to use a test shell that simply sends messages to the current
  process instead of performing IO (see `Mix.Shell.Process`).

  By default, this returns `Mix.Shell.IO`.

  ## Examples

      Mix.shell().info("Preparing to do something dangerous...")

      if Mix.shell().yes?("Are you sure?") do
        # do something dangerous
      end

  """
  @spec shell() :: module
  def shell do
    Mix.State.get(:shell, Mix.Shell.IO)
  end
  @doc """
  Sets the current shell.

  As an argument you may pass `Mix.Shell.IO`, `Mix.Shell.Process`,
  `Mix.Shell.Quiet`, or any module that implements the `Mix.Shell`
  behaviour.

  After calling this function, `shell` becomes the shell that is
  returned by `shell/0`.

  ## Examples

      iex> Mix.shell(Mix.Shell.IO)
      :ok

  You can use `shell/0` and `shell/1` to temporarily switch shells,
  for example, if you want to run a Mix Task that normally produces
  a lot of output:

      shell = Mix.shell()
      Mix.shell(Mix.Shell.Quiet)

      try do
        Mix.Task.run("noisy.task")
      after
        Mix.shell(shell)
      end

  """
  @spec shell(module) :: :ok
  def shell(shell) do
    Mix.State.put(:shell, shell)
  end
  @doc """
  Returns `true` if Mix is in debug mode, `false` otherwise.
  """
  @spec debug?() :: boolean()
  def debug? do
    Mix.State.get(:debug, false)
  end
  @doc """
  Sets Mix debug mode.
  """
  @spec debug(boolean()) :: :ok
  def debug(debug) when is_boolean(debug) do
    Mix.State.put(:debug, debug)
  end
  @doc """
  Raises a Mix error that is nicely formatted, defaulting to exit code `1`.
  """
  @spec raise(binary) :: no_return
  def raise(message) do
    # __MODULE__ is required here because Kernel.raise/2 is excluded via
    # `import Kernel, except: [raise: 2]` at the top of this module.
    __MODULE__.raise(message, exit_code: 1)
  end
  @doc """
  Raises a Mix error that is nicely formatted.

  ## Options

    * `:exit_code` - defines exit code value, defaults to `1`

  """
  @doc since: "1.12.0"
  @spec raise(binary, exit_code: non_neg_integer()) :: no_return
  def raise(message, opts) when is_binary(message) and is_list(opts) do
    # The :mix field on Mix.Error carries the exit code for the CLI to use
    # when terminating the VM.
    Kernel.raise(Mix.Error, mix: Keyword.get(opts, :exit_code, 1), message: message)
  end
  @doc """
  The path for local archives or escripts.
  """
  @doc since: "1.10.0"
  @spec path_for(:archives | :escripts) :: String.t()
  def path_for(:archives) do
    # MIX_ARCHIVES env var overrides the default location under Mix's home.
    System.get_env("MIX_ARCHIVES") || Path.join(Mix.Utils.mix_home(), "archives")
  end

  def path_for(:escripts) do
    Path.join(Mix.Utils.mix_home(), "escripts")
  end
  @doc """
  Installs and starts dependencies.

  The given `deps` should be in the same format as defined in a regular Mix
  project. See `mix help deps` for more information. As a shortcut, an atom
  can be given as dependency to mean the latest version. In other words,
  specifying `:decimal` is the same as `{:decimal, ">= 0.0.0"}`.

  After each successful installation, a given set of dependencies is cached
  so starting another VM and calling `Mix.install/2` with the same dependencies
  will avoid unnecessary downloads and compilations. The location of the cache
  directory can be controlled using the `MIX_INSTALL_DIR` environment variable.

  This function can only be called outside of a Mix project and only with the
  same dependencies in the given VM.

  **Note:** this feature is currently experimental and it may change
  in future releases.

  ## Options

    * `:force` - if `true`, removes install cache. This is useful when you want
      to update your dependencies or your install got into an inconsistent state
      (Default: `false`)

    * `:verbose` - if `true`, prints additional debugging information
      (Default: `false`)

    * `:consolidate_protocols` - if `true`, runs protocol
      consolidation via the `mix compile.protocols` task (Default: `true`)

    * `:elixir` - if set, ensures the current Elixir version matches the given
      version requirement (Default: `nil`)

  ## Examples

      Mix.install([
        :decimal,
        {:jason, "~> 1.0"}
      ])

  """
  @doc since: "1.12.0"
  def install(deps, opts \\ [])

  def install(deps, opts) when is_list(deps) and is_list(opts) do
    Mix.start()

    # install/2 manages its own throwaway project, so it must not run inside
    # an existing Mix project.
    if Mix.Project.get() do
      Mix.raise("Mix.install/2 cannot be used inside a Mix project")
    end

    elixir_requirement = opts[:elixir]
    elixir_version = System.version()

    # Optionally enforce the :elixir version requirement before doing any work.
    if !!elixir_requirement and not Version.match?(elixir_version, elixir_requirement) do
      Mix.raise(
        "Mix.install/2 declared it supports only Elixir #{elixir_requirement} " <>
          "but you're running on Elixir #{elixir_version}"
      )
    end

    # Normalize shorthand dep declarations (bare atoms -> {:app, ">= 0.0.0"})
    # and expand relative :path options to absolute paths, since we later
    # change the current directory.
    deps =
      Enum.map(deps, fn
        dep when is_atom(dep) ->
          {dep, ">= 0.0.0"}

        {app, opts} when is_atom(app) and is_list(opts) ->
          {app, maybe_expand_path_dep(opts)}

        {app, requirement, opts} when is_atom(app) and is_binary(requirement) and is_list(opts) ->
          {app, requirement, maybe_expand_path_dep(opts)}

        other ->
          other
      end)

    force? = !!opts[:force]

    # A VM may only ever install one dependency set; calling again with the
    # same (normalized) set is a no-op, anything else is an error.
    case Mix.State.get(:installed) do
      nil ->
        :ok

      ^deps when not force? ->
        :ok

      _ ->
        Mix.raise("Mix.install/2 can only be called with the same dependencies in the given VM")
    end

    installs_root =
      System.get_env("MIX_INSTALL_DIR") ||
        Path.join(Mix.Utils.mix_cache(), "installs")

    # The cache directory is keyed by an MD5 of the dep set plus the exact
    # Elixir/ERTS versions, so different toolchains never share builds.
    id = deps |> :erlang.term_to_binary() |> :erlang.md5() |> Base.encode16(case: :lower)
    version = "elixir-#{System.version()}-erts-#{:erlang.system_info(:version)}"
    dir = Path.join([installs_root, version, id])

    if opts[:verbose] do
      Mix.shell().info("using #{dir}")
    end

    if force? do
      File.rm_rf!(dir)
    end

    # Minimal in-memory project configuration for the synthetic install project.
    config = [
      version: "0.1.0",
      build_per_environment: true,
      build_path: "_build",
      lockfile: "mix.lock",
      deps_path: "deps",
      deps: deps,
      app: :mix_install,
      erlc_paths: ["src"],
      elixirc_paths: ["lib"],
      compilers: [],
      consolidate_protocols: Keyword.get(opts, :consolidate_protocols, true)
    ]

    :ok = Mix.Local.append_archives()
    :ok = Mix.ProjectStack.push(__MODULE__.InstallProject, config, "nofile")
    build_dir = Path.join(dir, "_build")

    try do
      # A pre-existing _build dir means deps were fetched on a previous run.
      run_deps? = not File.dir?(build_dir)
      File.mkdir_p!(dir)

      File.cd!(dir, fn ->
        if run_deps? do
          Mix.Task.rerun("deps.get")
        end

        Mix.Task.rerun("deps.loadpaths")
        Mix.Task.rerun("compile")
      end)

      # Start every installed application so callers can use them immediately.
      for app <- Mix.Project.deps_apps() do
        Application.ensure_all_started(app)
      end

      Mix.State.put(:installed, deps)
      :ok
    after
      # Always pop the synthetic project, even if fetching/compiling raised.
      Mix.ProjectStack.pop()
    end
  end
defp maybe_expand_path_dep(opts) do
if Keyword.has_key?(opts, :path) do
Keyword.update!(opts, :path, &Path.expand/1)
else
opts
end
end
end
| 30.34984 | 148 | 0.664772 |
ff64c2fbd3a8d87159e71ebd9a364f1cec2dc339 | 11,709 | ex | Elixir | lib/stripe/subscriptions/subscription.ex | esse/stripity_stripe-1 | 4434ed7911e818f483cd0eb91ca8f867165fa9f1 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/subscriptions/subscription.ex | esse/stripity_stripe-1 | 4434ed7911e818f483cd0eb91ca8f867165fa9f1 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/subscriptions/subscription.ex | esse/stripity_stripe-1 | 4434ed7911e818f483cd0eb91ca8f867165fa9f1 | [
"BSD-3-Clause"
] | 1 | 2021-09-06T14:46:19.000Z | 2021-09-06T14:46:19.000Z | defmodule Stripe.Subscription do
@moduledoc """
Work with Stripe subscription objects.
You can:
- Create a subscription
- Retrieve a subscription
- Update a subscription
- Delete a subscription
Stripe API reference: https://stripe.com/docs/api#subscription
"""
use Stripe.Entity
import Stripe.Request
import Stripe.Util, only: [log_deprecation: 1]
@type pause_collection :: %{
behavior: String.t(),
resumes_at: Stripe.timestamp()
}
@type pending_invoice_item_interval :: %{
interval: String.t(),
interval_count: integer
}
@type pending_update :: %{
billing_cycle_anchor: Stripe.timestamp(),
expires_at: Stripe.timestamp(),
subscription_items: [Stripe.SubscriptionItem.t()],
trial_end: Stripe.timestamp(),
trial_from_plan: boolean
}
@type t :: %__MODULE__{
id: Stripe.id(),
object: String.t(),
application_fee_percent: float | nil,
billing_cycle_anchor: Stripe.timestamp() | nil,
billing_thresholds: map | nil,
collection_method: String.t() | nil,
collection_method_cycle_anchor: Stripe.timestamp() | nil,
collection_method_thresholds: Stripe.Types.collection_method_thresholds() | nil,
cancel_at: Stripe.timestamp() | nil,
cancel_at_period_end: boolean,
canceled_at: Stripe.timestamp() | nil,
created: Stripe.timestamp(),
current_period_end: Stripe.timestamp() | nil,
current_period_start: Stripe.timestamp() | nil,
customer: Stripe.id() | Stripe.Customer.t(),
days_until_due: integer | nil,
default_payment_method: Stripe.id() | Stripe.PaymentMethod.t() | nil,
default_source: Stripe.id() | Stripe.Source.t() | nil,
default_tax_rates: list(Stripe.TaxRate),
discount: Stripe.Discount.t() | nil,
ended_at: Stripe.timestamp() | nil,
items: Stripe.List.t(Stripe.SubscriptionItem.t()),
latest_invoice: Stripe.id() | Stripe.Invoice.t() | nil,
livemode: boolean,
metadata: Stripe.Types.metadata(),
next_pending_invoice_item_invoice: Stripe.timestamp() | nil,
pending_invoice_item_interval: pending_invoice_item_interval() | nil,
pending_setup_intent: Stripe.SetupIntent.t() | nil,
pending_update: pending_update() | nil,
plan: Stripe.Plan.t() | nil,
pause_collection: pause_collection() | nil,
quantity: integer | nil,
schedule: String.t() | nil,
start_date: Stripe.timestamp(),
status: String.t(),
tax_percent: float | nil,
trial_end: Stripe.timestamp() | nil,
trial_start: Stripe.timestamp() | nil
}
defstruct [
:id,
:object,
:application_fee_percent,
:billing_cycle_anchor,
:billing_thresholds,
:collection_method,
:collection_method_cycle_anchor,
:collection_method_thresholds,
:cancel_at,
:cancel_at_period_end,
:canceled_at,
:created,
:current_period_end,
:current_period_start,
:customer,
:days_until_due,
:default_payment_method,
:default_source,
:default_tax_rates,
:discount,
:ended_at,
:items,
:latest_invoice,
:livemode,
:metadata,
:next_pending_invoice_item_invoice,
:pending_invoice_item_interval,
:pending_setup_intent,
:pending_update,
:plan,
:pause_collection,
:quantity,
:schedule,
:start_date,
:status,
:tax_percent,
:trial_end,
:trial_start
]
@plural_endpoint "subscriptions"
@doc """
Create a subscription.
"""
@spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{
:customer => Stripe.id() | Stripe.Customer.t(),
optional(:application_fee_percent) => integer,
optional(:billing_cycle_anchor) => Stripe.timestamp(),
optional(:billing_thresholds) => map,
optional(:collection_method) => String.t(),
optional(:collection_method_cycle_anchor) => Stripe.timestamp(),
optional(:cancel_at) => Stripe.timestamp(),
optional(:collection_method) => String.t(),
optional(:coupon) => Stripe.id() | Stripe.Coupon.t(),
optional(:days_until_due) => non_neg_integer,
:items => [
%{
optional(:plan) => Stripe.id() | Stripe.Plan.t(),
optional(:price) => Stripe.id() | Stripe.Price.t(),
optional(:billing_methods) => map,
optional(:metadata) => map,
optional(:quantity) => non_neg_integer,
optional(:tax_rates) => list
}
],
optional(:default_payment_method) => Stripe.id(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:prorate) => boolean,
optional(:proration_behavior) => String.t(),
optional(:tax_percent) => float,
optional(:trial_end) => Stripe.timestamp(),
optional(:trial_from_plan) => boolean,
optional(:trial_period_days) => non_neg_integer
}
def create(params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint)
|> put_params(params)
|> put_method(:post)
|> cast_to_id([:coupon, :customer])
|> make_request()
end
@doc """
Retrieve a subscription.
"""
@spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def retrieve(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:get)
|> make_request()
end
@doc """
Update a subscription.
Takes the `id` and a map of changes.
"""
@spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{
optional(:application_fee_percent) => float,
optional(:billing_cycle_anchor) => Stripe.timestamp(),
optional(:billing_thresholds) => map,
optional(:collection_method) => String.t(),
optional(:collection_method_cycle_anchor) => Stripe.timestamp(),
optional(:cancel_at) => Stripe.timestamp(),
optional(:cancel_at_period_end) => boolean(),
optional(:collection_method) => String.t(),
optional(:coupon) => Stripe.id() | Stripe.Coupon.t(),
optional(:days_until_due) => non_neg_integer,
optional(:items) => [
%{
optional(:id) => Stripe.id() | binary(),
optional(:plan) => Stripe.id() | Stripe.Plan.t(),
optional(:price) => Stripe.id() | Stripe.Price.t(),
optional(:billing_methods) => map,
optional(:metadata) => map,
optional(:quantity) => non_neg_integer,
optional(:tax_rates) => list
}
],
optional(:default_payment_method) => Stripe.id(),
optional(:metadata) => Stripe.Types.metadata(),
optional(:prorate) => boolean,
optional(:proration_behavior) => String.t(),
optional(:proration_date) => Stripe.timestamp(),
optional(:tax_percent) => float,
optional(:trial_end) => Stripe.timestamp(),
optional(:trial_from_plan) => boolean
}
def update(id, params, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:post)
|> put_params(params)
|> cast_to_id([:coupon])
|> make_request()
end
@doc """
Delete a subscription.
Takes the subscription `id` or a `Stripe.Subscription` struct.
"""
@spec delete(Stripe.id() | t) :: {:ok, t} | {:error, Stripe.Error.t()}
def delete(id), do: delete(id, %{}, [])
@doc """
Delete a subscription.
Takes the subscription `id` or a `Stripe.Subscription` struct.
Second argument can be a map of cancellation `params`, such as `invoice_now`,
or a list of options, such as custom API key.
### Deprecated Usage
Passing a map with `at_period_end: true` to `Subscription.delete/2`
is deprecated. Use `Subscription.update/2` with
`cancel_at_period_end: true` instead.
"""
@spec delete(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
def delete(id, opts) when is_list(opts) do
delete(id, %{}, opts)
end
@spec delete(Stripe.id() | t, %{at_period_end: true}) :: {:ok, t} | {:error, Stripe.Error.t()}
def delete(id, %{at_period_end: true}) do
log_deprecation("Use Stripe.Subscription.update/2 with `cancel_at_period_end: true`")
update(id, %{cancel_at_period_end: true})
end
@spec delete(Stripe.id() | t, params) :: {:ok, t} | {:error, Stripe.Error.t()}
when params: %{
optional(:invoice_now) => boolean,
optional(:prorate) => boolean
}
def delete(id, params) when is_map(params) do
delete(id, params, [])
end
@doc """
Delete a subscription.
Takes the subscription `id` or a `Stripe.Subscription` struct.
Second argument is a map of cancellation `params`, such as `invoice_now`.
Third argument is a list of options, such as custom API key.
"""
@spec delete(Stripe.id() | t, %{at_period_end: true}, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
def delete(id, %{at_period_end: true}, opts) do
log_deprecation("Use Stripe.Subscription.update/2 with `cancel_at_period_end: true`")
update(id, %{cancel_at_period_end: true}, opts)
end
@spec delete(Stripe.id() | t, params, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
when params: %{
optional(:invoice_now) => boolean,
optional(:prorate) => boolean
}
def delete(id, params, opts) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
|> put_method(:delete)
|> put_params(params)
|> make_request()
end
@doc """
List all subscriptions.
"""
@spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
when params: %{
optional(:collection_method) => String.t(),
optional(:created) => Stripe.date_query(),
optional(:customer) => Stripe.Customer.t() | Stripe.id(),
optional(:ending_before) => t | Stripe.id(),
optional(:limit) => 1..100,
optional(:plan) => Stripe.Plan.t() | Stripe.id(),
optional(:price) => Stripe.Price.t() | Stripe.id(),
optional(:starting_after) => t | Stripe.id(),
optional(:status) => String.t()
}
def list(params \\ %{}, opts \\ []) do
new_request(opts)
|> prefix_expansions()
|> put_endpoint(@plural_endpoint)
|> put_method(:get)
|> put_params(params)
|> cast_to_id([:customer, :ending_before, :plan, :price, :starting_after])
|> make_request()
end
@doc """
Deletes the discount on a subscription.
"""
@spec delete_discount(Stripe.id() | t, Stripe.options()) ::
{:ok, t} | {:error, Stripe.Error.t()}
def delete_discount(id, opts \\ []) do
new_request(opts)
|> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}/discount")
|> put_method(:delete)
|> make_request()
end
end
| 35.374622 | 98 | 0.582202 |
ff64ffbbabcd09ca1da49ced0491ce3e2ba8a338 | 898 | exs | Elixir | mix.exs | fremantle-industries/clustered | 511098794f6fbd4fce33ed79e855b46bb91e6012 | [
"MIT"
] | null | null | null | mix.exs | fremantle-industries/clustered | 511098794f6fbd4fce33ed79e855b46bb91e6012 | [
"MIT"
] | 11 | 2021-09-01T06:02:13.000Z | 2022-03-01T06:03:07.000Z | mix.exs | fremantle-industries/clustered | 511098794f6fbd4fce33ed79e855b46bb91e6012 | [
"MIT"
] | null | null | null | defmodule Clustered.MixProject do
use Mix.Project
def project do
[
app: :clustered,
version: "0.0.1",
elixir: "~> 1.11",
package: package(),
start_permanent: Mix.env() == :prod,
description: description(),
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false},
{:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
{:ex_unit_notifier, "~> 1.0", only: :test},
{:excoveralls, "~> 0.10", only: :test}
]
end
defp description do
"Elixir cluster utilities"
end
defp package do
%{
licenses: ["MIT"],
maintainers: ["Alex Kwiatkowski"],
links: %{"GitHub" => "https://github.com/fremantle-industries/clustered"}
}
end
end
| 20.409091 | 79 | 0.548998 |
ff6533c51e98f2e1add914fc1484b45bb9f3baf6 | 695 | ex | Elixir | lib/cronex/test/date_time.ex | clangley/cronex | 32992311abb1f444dcd86f75cc4d8ad41dbb2b0a | [
"MIT"
] | 45 | 2016-11-03T11:00:33.000Z | 2021-02-03T14:59:58.000Z | lib/cronex/test/date_time.ex | clangley/cronex | 32992311abb1f444dcd86f75cc4d8ad41dbb2b0a | [
"MIT"
] | 19 | 2017-09-25T17:13:00.000Z | 2020-07-08T22:53:24.000Z | lib/cronex/test/date_time.ex | clangley/cronex | 32992311abb1f444dcd86f75cc4d8ad41dbb2b0a | [
"MIT"
] | 18 | 2017-01-20T22:00:45.000Z | 2021-06-16T12:47:53.000Z | defmodule Cronex.Test.DateTime do
@moduledoc """
Simple DateTime provider that is static and user manipulated.
"""
def start_link do
Agent.start_link(fn -> DateTime.from_unix!(0) end, name: __MODULE__)
end
@doc """
Sets the DateTime value of the provider.
"""
def set(args) when is_list(args) do
args_map = Enum.into(args, Map.new())
Agent.update(__MODULE__, fn date_time -> Map.merge(date_time, args_map) end)
end
@doc """
Gets the current DateTime value of the provider.
"""
def get do
Agent.get(__MODULE__, & &1)
end
@doc """
An alias for the `get/0` function, to mimic the `DateTime` module behaviour.
"""
def utc_now, do: get()
end
| 23.166667 | 80 | 0.667626 |
ff653c407cea3297cd8acf55263cf1b21b497225 | 3,760 | exs | Elixir | apps/astarte_realm_management_api/test/astarte_realm_management_api_web/controllers/realm_config_controller_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_realm_management_api/test/astarte_realm_management_api_web/controllers/realm_config_controller_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_realm_management_api/test/astarte_realm_management_api_web/controllers/realm_config_controller_test.exs | Annopaolo/astarte | f8190e8bf044759a9b84bdeb5786a55b6f793a4f | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.RealmManagement.APIWeb.RealmControllerTest do
  use Astarte.RealmManagement.APIWeb.ConnCase

  alias Astarte.RealmManagement.API.Config
  alias Astarte.RealmManagement.API.JWTTestHelper
  alias Astarte.RealmManagement.Mock.DB

  # A well-formed RSA public key used to exercise a successful config update.
  @new_pubkey """
  -----BEGIN PUBLIC KEY-----
  MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvO/EdyxtA8ssxSnYQr7g
  TC41/0QMvhHMqtqYSKEs1d8brOgBg51XRz1mh04v3s/k85kZq+MB4lrKzUxu0781
  MPkZpSEHU2ICD/kzE5DUDcwgvsnTBVGFX8UuGnCOICEV6rtjA+6G7Q1rEmZ017xc
  lCVz0J0EzzTzBoB1p7x56wbIDn2t7QHMkqVOIpgc+2wZqVYcxogMjGU+QcfGRFNU
  Q+qn3BHVDi5yY75LCvT8h4rvmhK30NOSVn1V8583D7uxrVY/fh/bhlMQ0AjPZo9g
  YeilQGMReWd3haRok4RT8MTThQfEJNeDZXLoZetz4ukPKInu0uE4zSAOUxkxvH6w
  MQIDAQAB
  -----END PUBLIC KEY-----
  """

  # PEM-framed but structurally invalid key material, to test rejection of
  # keys that parse as PEM but not as a valid public key.
  @malformed_pubkey """
  -----BEGIN PUBLIC KEY-----
  MFYwEAYHKoZIzj0CAQYAoDQgAE6ssZpw4aj98a1hDKM
  +bxRibfFC0G6SugduGzqIACSdIiLEn4Nubx2jt4tHDpel0BIrYKlCw==
  -----END PUBLIC KEY-----
  """

  @realm "config_test_realm"
  @update_attrs %{"jwt_public_key_pem" => @new_pubkey}
  @invalid_pubkey_attrs %{"jwt_public_key_pem" => "invalid"}
  @malformed_pubkey_attrs %{"jwt_public_key_pem" => @malformed_pubkey}

  setup_all do
    # Disable the auth since we will mess with the public key
    Config.put_disable_authentication(true)

    on_exit(fn ->
      # Restore auth on exit
      Config.reload_disable_authentication()
    end)
  end

  # Seed the mock DB with a known-good key and request JSON before each test.
  setup %{conn: conn} do
    DB.put_jwt_public_key_pem(@realm, JWTTestHelper.public_key_pem())
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end

  test "returns the auth config on show", %{conn: conn} do
    conn = get(conn, realm_config_path(conn, :show, @realm, "auth"))

    assert json_response(conn, 200)["data"]["jwt_public_key_pem"] ==
             JWTTestHelper.public_key_pem()
  end

  test "does not update auth config and renders errors when no public key is provided", %{
    conn: conn
  } do
    conn = put(conn, realm_config_path(conn, :update, @realm, "auth"), data: %{})
    assert json_response(conn, 422)["errors"] != %{}
  end

  test "does not update auth config and renders errors when public key is invalid", %{conn: conn} do
    conn =
      put(conn, realm_config_path(conn, :update, @realm, "auth"), data: @invalid_pubkey_attrs)

    assert json_response(conn, 422)["errors"] != %{}
  end

  test "does not update auth config and renders errors when public key is malformed", %{
    conn: conn
  } do
    conn =
      put(conn, realm_config_path(conn, :update, @realm, "auth"), data: @malformed_pubkey_attrs)

    assert json_response(conn, 422)["errors"] != %{}
  end

  test "updates and renders auth config when data is valid", %{conn: conn} do
    # Sanity check: the seeded key is in place before the update.
    conn = get(conn, realm_config_path(conn, :show, @realm, "auth"))

    assert json_response(conn, 200)["data"]["jwt_public_key_pem"] ==
             JWTTestHelper.public_key_pem()

    conn = put(conn, realm_config_path(conn, :update, @realm, "auth"), data: @update_attrs)
    assert response(conn, 204)

    # The subsequent show must reflect the newly stored key.
    conn = get(conn, realm_config_path(conn, :show, @realm, "auth"))
    assert json_response(conn, 200)["data"]["jwt_public_key_pem"] == @new_pubkey
  end
end
| 34.814815 | 100 | 0.726064 |
ff655d44ae1c6ff4f7e6ed38c1ad222cc988c204 | 34,151 | ex | Elixir | lib/live_element/channel.ex | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | lib/live_element/channel.ex | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | lib/live_element/channel.ex | gaslight/live_element | 78d4ab0a2daab470f2ffd25d446fbabb0d746afe | [
"MIT"
] | null | null | null | defmodule LiveElement.Channel do
@moduledoc false
use GenServer, restart: :temporary
require Logger
alias LiveElement.{Socket, Utils, Diff, Upload, UploadConfig, Route, Session, Lifecycle}
alias Phoenix.Socket.Message
  # Tag used to namespace internal messages so they cannot collide with
  # user-defined messages handled by the LiveView's own callbacks.
  @prefix :phoenix
  # NOTE(review): this attribute is not referenced in the visible portion of
  # this module - presumably used further down; confirm before removing.
  @not_mounted_at_router :not_mounted_at_router

  # Starts the channel server for a LiveView socket. `hibernate_after`
  # (default 15s) lets idle channel processes hibernate to compact memory.
  def start_link({endpoint, from}) do
    hibernate_after = endpoint.config(:live_view)[:hibernate_after] || 15000
    opts = [hibernate_after: hibernate_after]
    GenServer.start_link(__MODULE__, from, opts)
  end
def send_update(pid \\ self(), module, id, assigns) do
send(pid, {@prefix, :send_update, {module, id, assigns}})
end
def send_update_after(pid \\ self(), module, id, assigns, time_in_milliseconds)
when is_integer(time_in_milliseconds) do
Process.send_after(
pid,
{@prefix, :send_update, {module, id, assigns}},
time_in_milliseconds
)
end
  # Synchronously round-trips the channel process, flushing any in-flight
  # messages ahead of the ping in its mailbox.
  def ping(pid) do
    GenServer.call(pid, {@prefix, :ping}, :infinity)
  end

  # Registers an in-progress entry upload with the channel, associating the
  # calling (uploading) process with the upload config ref, entry ref, and
  # component id.
  def register_upload(pid, {upload_config_ref, entry_ref} = _ref, cid) do
    info = %{channel_pid: self(), ref: upload_config_ref, entry_ref: entry_ref, cid: cid}
    GenServer.call(pid, {@prefix, :register_entry_upload, info})
  end

  # Fetches the upload config registered under `name` for component `cid`.
  def fetch_upload_config(pid, name, cid) do
    GenServer.call(pid, {@prefix, :fetch_upload_config, name, cid})
  end
def drop_upload_entries(%UploadConfig{} = conf, entry_refs) do
info = %{ref: conf.ref, entry_refs: entry_refs, cid: conf.cid}
send(self(), {@prefix, :drop_upload_entries, info})
end
@impl true
def init({pid, _ref}) do
{:ok, Process.monitor(pid)}
end
  @impl true
  # First real message after init/1: the channel join. Stop monitoring the
  # starter process and mount the LiveView.
  def handle_info({Phoenix.Channel, auth_payload, from, phx_socket}, ref) do
    Process.demonitor(ref)
    mount(auth_payload, from, phx_socket)
  rescue
    # Normalize exceptions for better client debugging
    e -> reraise(e, __STACKTRACE__)
  end

  # The monitored starter process went down before the join arrived.
  def handle_info({:DOWN, ref, _, _, _reason}, ref) do
    {:stop, {:shutdown, :closed}, ref}
  end

  # The websocket transport process closed.
  def handle_info(
        {:DOWN, _, _, transport_pid, _reason},
        %{socket: %{transport_pid: transport_pid}} = state
      ) do
    {:stop, {:shutdown, :closed}, state}
  end

  # The parent LiveView exited: tell the transport to close, then shut down.
  def handle_info({:DOWN, _, _, parent, reason}, %{socket: %{parent_pid: parent}} = state) do
    send(state.socket.transport_pid, {:socket_close, self(), reason})
    {:stop, {:shutdown, :parent_exited}, state}
  end

  # Either an upload channel process went down (clean up its entry), or this
  # is an arbitrary :DOWN message destined for the view's handle_info/2.
  def handle_info({:DOWN, _, :process, pid, reason} = msg, %{socket: socket} = state) do
    case Map.fetch(state.upload_pids, pid) do
      {:ok, {ref, entry_ref, cid}} ->
        # A clean exit unregisters the entry; anything else is treated as an
        # upload failure and stops the channel.
        if reason in [:normal, {:shutdown, :closed}] do
          new_state =
            state
            |> drop_upload_pid(pid)
            |> unregister_upload(ref, entry_ref, cid)

          {:noreply, new_state}
        else
          {:stop, {:shutdown, {:channel_upload_exit, reason}}, state}
        end

      :error ->
        msg
        |> view_handle_info(socket)
        |> handle_result({:handle_info, 2, nil}, state)
    end
  end

  # Client left the channel: acknowledge, close the socket, and stop.
  def handle_info(%Message{topic: topic, event: "phx_leave"} = msg, %{topic: topic} = state) do
    send(state.socket.transport_pid, {:socket_close, self(), {:shutdown, :left}})
    reply(state, msg.ref, :ok, %{})
    {:stop, {:shutdown, :left}, state}
  end

  # Upload progress report from the client for a single entry.
  def handle_info(%Message{topic: topic, event: "progress"} = msg, %{topic: topic} = state) do
    cid = msg.payload["cid"]

    new_state =
      write_socket(state, cid, msg.ref, fn socket, _ ->
        %{"ref" => ref, "entry_ref" => entry_ref, "progress" => progress} = msg.payload
        new_socket = Upload.update_progress(socket, ref, entry_ref, progress)
        upload_conf = Upload.get_upload_by_ref!(new_socket, ref)
        entry = UploadConfig.get_entry_by_ref(upload_conf, entry_ref)

        # Invoke the user-supplied progress callback, if one is configured
        # and the entry still exists.
        if event = entry && upload_conf.progress_event do
          {:noreply, new_socket} = event.(upload_conf.name, entry, new_socket)
          {new_socket, {:ok, {msg.ref, %{}}, state}}
        else
          {new_socket, {:ok, {msg.ref, %{}}, state}}
        end
      end)

    {:noreply, new_state}
  end

  # Client preflight asking permission to upload a set of entries.
  def handle_info(%Message{topic: topic, event: "allow_upload"} = msg, %{topic: topic} = state) do
    %{"ref" => upload_ref, "entries" => entries} = payload = msg.payload
    cid = payload["cid"]

    new_state =
      write_socket(state, cid, msg.ref, fn socket, _ ->
        socket = Upload.register_cid(socket, upload_ref, cid)
        conf = Upload.get_upload_by_ref!(socket, upload_ref)
        ensure_unique_upload_name!(state, conf)

        {ok_or_error, reply, %Socket{} = new_socket} =
          with {:ok, new_socket} <- Upload.put_entries(socket, conf, entries, cid) do
            Upload.generate_preflight_response(new_socket, conf.name, cid)
          end

        # Only remember the upload name when the preflight succeeded.
        new_upload_names =
          case ok_or_error do
            :ok -> Map.put(state.upload_names, conf.name, {upload_ref, cid})
            _ -> state.upload_names
          end

        {new_socket, {:ok, {msg.ref, reply}, %{state | upload_names: new_upload_names}}}
      end)

    {:noreply, new_state}
  end

  # Client reports components removed from the DOM; drop their server state.
  def handle_info(
        %Message{topic: topic, event: "cids_destroyed"} = msg,
        %{topic: topic} = state
      ) do
    %{"cids" => cids} = msg.payload
    {deleted_cids, new_state} = delete_components(state, cids)
    {:noreply, reply(new_state, msg.ref, :ok, %{cids: deleted_cids})}
  end

  # A client-triggered event (click, form submit, etc.) for the view.
  def handle_info(%Message{topic: topic, event: "event"} = msg, %{topic: topic} = state) do
    %{"value" => raw_val, "event" => event, "type" => type} = msg.payload
    val = decode_event_type(type, raw_val)

    new_state = %{state | socket: maybe_update_uploads(state.socket, msg.payload)}

    new_state.socket
    |> view_handle_event(event, val)
    |> handle_result({:handle_event, 3, msg.ref}, new_state)
  end

  # Internal request (see drop_upload_entries/2) to drop entries from an
  # upload config.
  def handle_info({@prefix, :drop_upload_entries, info}, state) do
    %{ref: ref, cid: cid, entry_refs: entry_refs} = info

    new_state =
      write_socket(state, cid, nil, fn socket, _ ->
        upload_config = Upload.get_upload_by_ref!(socket, ref)
        {Upload.drop_upload_entries(socket, upload_config, entry_refs), {:ok, nil, state}}
      end)

    {:noreply, new_state}
  end

  # Internal request (see send_update/4) to re-render a stateful component.
  def handle_info({@prefix, :send_update, update}, state) do
    case Diff.update_component(state.socket, state.components, update) do
      {diff, new_components} ->
        {:noreply, push_diff(%{state | components: new_components}, diff, nil)}

      :noop ->
        {module, id, _} = update

        if function_exported?(module, :__info__, 1) do
          # Only a warning, because there can be race conditions where a component is removed before a `send_update` happens.
          Logger.debug(
            "send_update failed because component #{inspect(module)} with ID #{inspect(id)} does not exist or it has been removed"
          )
        else
          raise ArgumentError, "send_update failed (module #{inspect(module)} is not available)"
        end

        {:noreply, state}
    end
  end

  # Internal redirect request.
  def handle_info({@prefix, :redirect, command, flash}, state) do
    handle_redirect(state, command, flash, nil)
  end

  # Catch-all: any other message is forwarded to the view's handle_info/2.
  def handle_info(msg, %{socket: socket} = state) do
    msg
    |> view_handle_info(socket)
    |> handle_result({:handle_info, 2, nil}, state)
  end
  @impl true
  # Used by ping/1 purely to synchronize with this process's mailbox.
  def handle_call({@prefix, :ping}, _from, state) do
    {:reply, :ok, state}
  end

  # Looks up the upload config registered under `name` in the socket (or
  # component socket for `cid`) assigns. Replies {:ok, conf} or :error.
  def handle_call({@prefix, :fetch_upload_config, name, cid}, _from, state) do
    read_socket(state, cid, fn socket, _ ->
      result =
        with {:ok, uploads} <- Map.fetch(socket.assigns, :uploads),
             {:ok, conf} <- Map.fetch(uploads, name),
             do: {:ok, conf}

      {:reply, result, state}
    end)
  end

  # A child LiveView is mounting and asks for the parent assigns listed in
  # `assign_new` so it can inherit them.
  def handle_call({@prefix, :child_mount, _child_pid, assign_new}, _from, state) do
    assigns = Map.take(state.socket.assigns, assign_new)
    {:reply, {:ok, assigns}, state}
  end

  # Registers an entry upload; the reply is produced asynchronously by
  # register_entry_upload/3 (hence :noreply here).
  def handle_call({@prefix, :register_entry_upload, info}, from, state) do
    {:noreply, register_entry_upload(state, from, info)}
  end

  # Any other call is forwarded to the LiveView's own handle_call/3.
  def handle_call(msg, from, %{socket: socket} = state) do
    case socket.view.handle_call(msg, from, socket) do
      {:reply, reply, %Socket{} = new_socket} ->
        case handle_changed(state, new_socket, nil) do
          {:noreply, new_state} -> {:reply, reply, new_state}
          {:stop, reason, new_state} -> {:stop, reason, reply, new_state}
        end

      other ->
        handle_result(other, {:handle_call, 3, nil}, state)
    end
  end
@impl true
# Casts are delegated straight to the view's optional `handle_cast/2` callback.
def handle_cast(msg, %{socket: socket} = state) do
  msg
  |> socket.view.handle_cast(socket)
  |> handle_result({:handle_cast, 2, nil}, state)
end
@impl true
# Invokes the view's optional `terminate/2` callback when it is exported.
def terminate(reason, %{socket: socket}) do
  %{view: view} = socket

  if function_exported?(view, :terminate, 2) do
    view.terminate(reason, socket)
  else
    :ok
  end
end

# The channel may terminate before a socket was built (e.g. `mount` stopped
# with `:no_state`/`:no_session`), so fall through for non-map states.
def terminate(_reason, _state) do
  :ok
end

@impl true
# Invokes the view's optional `code_change/3` callback when it is exported.
def code_change(old, %{socket: socket} = state, extra) do
  %{view: view} = socket

  if function_exported?(view, :code_change, 3) do
    view.code_change(old, socket, extra)
  else
    {:ok, state}
  end
end
# Built-in `lv:clear-flash` event: clears a single flash key when given,
# otherwise the whole flash.
defp view_handle_event(%Socket{} = socket, "lv:clear-flash", val) do
  case val do
    %{"key" => key} -> {:noreply, Utils.clear_flash(socket, key)}
    _ -> {:noreply, Utils.clear_flash(socket)}
  end
end

# The `lv:` prefix is reserved for framework events; anything else is a bug.
defp view_handle_event(%Socket{}, "lv:" <> _ = bad_event, _val) do
  raise ArgumentError, """
  received unknown LiveView event #{inspect(bad_event)}.
  The following LiveView events are supported: lv:clear-flash.
  """
end

# User events first run the lifecycle `handle_event` hooks; a `:halt` result
# short-circuits the view callback. The whole dispatch is wrapped in a
# `[:phoenix, :live_view, :handle_event]` telemetry span.
defp view_handle_event(%Socket{} = socket, event, val) do
  :telemetry.span(
    [:phoenix, :live_view, :handle_event],
    %{socket: socket, event: event, params: val},
    fn ->
      case Lifecycle.handle_event(event, val, socket) do
        {:halt, %Socket{} = socket} ->
          {{:noreply, socket}, %{socket: socket, event: event, params: val}}

        {:cont, %Socket{} = socket} ->
          case socket.view.handle_event(event, val, socket) do
            {:noreply, %Socket{} = socket} ->
              {{:noreply, socket}, %{socket: socket, event: event, params: val}}

            {:reply, reply, %Socket{} = socket} ->
              {{:reply, reply, socket}, %{socket: socket, event: event, params: val}}

            other ->
              raise_bad_callback_response!(other, socket.view, :handle_event, 3)
          end
      end
    end
  )
end

# Runs lifecycle `handle_info` hooks before delegating to the view's callback.
defp view_handle_info(msg, %{view: view} = socket) do
  case Lifecycle.handle_info(msg, socket) do
    {:halt, %Socket{} = socket} -> {:noreply, socket}
    {:cont, %Socket{} = socket} -> view.handle_info(msg, socket)
  end
end
# Runs `handle_params/3` as part of mount, covering the early-exit cases:
# a redirect issued during mount, views without any handle_params lifecycle,
# non-root or router-less sockets, and URLs not mounted via the router.
defp maybe_call_mount_handle_params(%{socket: socket} = state, router, url, params) do
  %{view: view, redirected: mount_redirect} = socket
  lifecycle = Lifecycle.stage_info(socket, view, :handle_params, 3)

  cond do
    mount_redirect ->
      mount_handle_params_result({:noreply, socket}, state, :mount)

    not lifecycle.any? ->
      {:diff, diff, new_state} = render_diff(state, socket, true)
      {:ok, diff, :mount, new_state}

    socket.root_pid != self() or is_nil(router) ->
      # Let the callback fail for the usual reasons
      Route.live_link_info!(%{socket | router: nil}, view, url)

    params == @not_mounted_at_router ->
      raise "cannot invoke handle_params/3 for #{inspect(view)} because #{inspect(view)}" <>
              " was not mounted at the router with the live/3 macro under URL #{inspect(url)}"

    true ->
      socket
      |> Utils.call_handle_params!(view, lifecycle.exported?, params, url)
      |> mount_handle_params_result(state, :mount)
  end
end

# Converts the `{:noreply, socket}` result of mount-time handle_params into
# the channel's mount reply, following any redirect recorded on the socket.
# A live patch re-runs handle_params with the patched params/action and
# recurses, tagging the result as `{:live_patch, opts}`.
defp mount_handle_params_result({:noreply, %Socket{} = new_socket}, state, redir) do
  new_state = %{state | socket: new_socket}

  case maybe_diff(new_state, true) do
    {:diff, diff, new_state} ->
      {:ok, diff, redir, new_state}

    {:redirect, %{to: _to} = opts} ->
      {:redirect, copy_flash(new_state, Utils.get_flash(new_socket), opts), new_state}

    {:redirect, %{external: url}} ->
      {:redirect, copy_flash(new_state, Utils.get_flash(new_socket), %{to: url}), new_state}

    {:live, :redirect, %{to: _to} = opts} ->
      {:live_redirect, copy_flash(new_state, Utils.get_flash(new_socket), opts), new_state}

    {:live, {params, action}, %{to: to} = opts} ->
      %{socket: new_socket} = new_state = drop_redirect(new_state)
      uri = build_uri(new_state, to)

      new_socket
      |> assign_action(action)
      |> Utils.call_handle_params!(new_socket.view, params, uri)
      |> mount_handle_params_result(new_state, {:live_patch, opts})
  end
end
# Normalizes callback results: `{:reply, map, socket}` is only accepted for
# handle_event/3 (the reply is staged on the socket via `Utils.put_reply/2`);
# `{:noreply, socket}` goes through the regular change handling; anything
# else raises with a callback-specific message.
defp handle_result(
       {:reply, %{} = reply, %Socket{} = new_socket},
       {:handle_event, 3, ref},
       state
     ) do
  handle_changed(state, Utils.put_reply(new_socket, reply), ref)
end

defp handle_result({:noreply, %Socket{} = new_socket}, {_from, _arity, ref}, state) do
  handle_changed(state, new_socket, ref)
end

defp handle_result(result, {name, arity, _ref}, state) do
  raise_bad_callback_response!(result, state.socket.view, name, arity)
end
# Raises a descriptive ArgumentError for an invalid callback return value.
#
# Fix: the handle_call/3 message previously suggested `{:reply, map, %Socket}`
# (missing braces — not a valid struct pattern); it now shows `%Socket{}`,
# consistent with the handle_event clause below.
defp raise_bad_callback_response!(result, view, :handle_call, 3) do
  raise ArgumentError, """
  invalid noreply from #{inspect(view)}.handle_call/3 callback.
  Expected one of:
  {:noreply, %Socket{}}
  {:reply, map, %Socket{}}
  Got: #{inspect(result)}
  """
end

# handle_event may return either the noreply or the reply shape.
defp raise_bad_callback_response!(result, view, :handle_event, arity) do
  raise ArgumentError, """
  invalid return from #{inspect(view)}.handle_event/#{arity} callback.
  Expected one of:
  {:noreply, %Socket{}}
  {:reply, map, %Socket{}}
  Got: #{inspect(result)}
  """
end

# Every other callback only supports `{:noreply, socket}`.
defp raise_bad_callback_response!(result, view, name, arity) do
  raise ArgumentError, """
  invalid noreply from #{inspect(view)}.#{name}/#{arity} callback.
  Expected one of:
  {:noreply, %Socket{}}
  Got: #{inspect(result)}
  """
end
# Removes a completed entry upload from its upload config (root socket or the
# component `cid`) and drops the matching upload-name bookkeeping.
defp unregister_upload(state, ref, entry_ref, cid) do
  write_socket(state, cid, nil, fn socket, _ ->
    conf = Upload.get_upload_by_ref!(socket, ref)
    new_state = drop_upload_name(state, conf.name)
    {Upload.unregister_completed_entry_upload(socket, conf, entry_ref), {:ok, nil, new_state}}
  end)
end

# Monitors an upload-channel pid and records which upload entry it belongs to,
# so its DOWN message can be routed back to that entry.
defp put_upload_pid(state, pid, ref, entry_ref, cid) when is_pid(pid) do
  Process.monitor(pid)
  %{state | upload_pids: Map.put(state.upload_pids, pid, {ref, entry_ref, cid})}
end

defp drop_upload_pid(state, pid) when is_pid(pid) do
  %{state | upload_pids: Map.delete(state.upload_pids, pid)}
end

defp drop_upload_name(state, name) do
  {_, new_state} = pop_in(state.upload_names[name])
  new_state
end

# Form events arrive URL-encoded; every other event type passes its value
# through unchanged.
defp decode_event_type("form", url_encoded) do
  url_encoded
  |> Plug.Conn.Query.decode()
  |> decode_merge_target()
end

defp decode_event_type(_, value), do: value

# Normalizes the `_target` form field into a key path (list of keys);
# already-decoded lists pass through untouched.
defp decode_merge_target(%{"_target" => target} = params) when is_list(target), do: params

defp decode_merge_target(%{"_target" => target} = params) when is_binary(target) do
  keyspace = target |> Plug.Conn.Query.decode() |> gather_keys([])
  Map.put(params, "_target", Enum.reverse(keyspace))
end

defp decode_merge_target(%{} = params), do: params

# Walks the single-path nested structure produced by `Plug.Conn.Query.decode/1`,
# collecting keys outside-in (the caller reverses the accumulator).
defp gather_keys(%{} = map, acc) do
  case Enum.at(map, 0) do
    {key, val} -> gather_keys(val, [key | acc])
    nil -> acc
  end
end

defp gather_keys([], acc), do: acc
defp gather_keys([%{} = map], acc), do: gather_keys(map, acc)
defp gather_keys(_, acc), do: acc
# Central post-callback step: renders a diff and pushes it, or follows any
# redirect recorded on the socket. `ref` acknowledges the originating client
# event, when there is one.
defp handle_changed(state, %Socket{} = new_socket, ref, pending_live_patch \\ nil) do
  new_state = %{state | socket: new_socket}

  case maybe_diff(new_state, false) do
    {:diff, diff, new_state} ->
      {:noreply,
       new_state
       |> push_live_patch(pending_live_patch)
       |> push_diff(diff, ref)}

    result ->
      handle_redirect(new_state, result, Utils.changed_flash(new_socket), ref)
  end
end

defp maybe_push_pending_diff_ack(state, nil), do: state
defp maybe_push_pending_diff_ack(state, {diff, ref}), do: push_diff(state, diff, ref)

# Executes a redirect command. External/plain redirects and live redirects
# shut the channel down; live patches re-run handle_params on the root view,
# or are forwarded to the root process when issued from a child.
defp handle_redirect(new_state, result, flash, ref, pending_diff_ack \\ nil) do
  %{socket: new_socket} = new_state
  root_pid = new_socket.root_pid

  case result do
    {:redirect, %{external: to} = opts} ->
      # External redirects are normalized to a `:to` destination for the client.
      opts =
        copy_flash(new_state, flash, opts)
        |> Map.delete(:external)
        |> Map.put(:to, to)

      new_state
      |> push_redirect(opts, ref)
      |> stop_shutdown_redirect(:redirect, opts)

    {:redirect, %{to: _to} = opts} ->
      opts = copy_flash(new_state, flash, opts)

      new_state
      |> push_redirect(opts, ref)
      |> stop_shutdown_redirect(:redirect, opts)

    {:live, :redirect, %{to: _to} = opts} ->
      opts = copy_flash(new_state, flash, opts)

      new_state
      |> push_live_redirect(opts, ref, pending_diff_ack)
      |> stop_shutdown_redirect(:live_redirect, opts)

    {:live, {params, action}, %{to: _to, kind: _kind} = opts} when root_pid == self() ->
      # Live patch on the root view: run handle_params here.
      new_state
      |> drop_redirect()
      |> maybe_push_pending_diff_ack(pending_diff_ack)
      |> Map.update!(:socket, &Utils.replace_flash(&1, flash))
      |> sync_handle_params_with_live_redirect(params, action, opts, ref)

    {:live, {_params, _action}, %{to: _to, kind: _kind}} = patch ->
      # Live patch issued by a child: the root LiveView owns handle_params,
      # so forward the patch and keep rendering this child.
      send(new_socket.root_pid, {@prefix, :redirect, patch, flash})
      {:diff, diff, new_state} = render_diff(new_state, new_socket, false)

      {:noreply,
       new_state
       |> drop_redirect()
       |> maybe_push_pending_diff_ack(pending_diff_ack)
       |> push_diff(diff, ref)}
  end
end

# Closes the transport-side socket and stops the channel with a shutdown
# reason carrying the redirect kind and opts.
defp stop_shutdown_redirect(state, kind, opts) do
  send(state.socket.transport_pid, {:socket_close, self(), {kind, opts}})
  {:stop, {:shutdown, {kind, opts}}, state}
end

defp drop_redirect(state) do
  put_in(state.socket.redirected, nil)
end

# Re-runs handle_params for a live patch on the root view; the patch opts are
# pushed to the client together with the resulting diff (see handle_changed/4).
defp sync_handle_params_with_live_redirect(state, params, action, %{to: to} = opts, ref) do
  %{socket: socket} = state

  {:noreply, %Socket{} = new_socket} =
    socket
    |> assign_action(action)
    |> Utils.call_handle_params!(socket.view, params, build_uri(state, to))

  handle_changed(state, new_socket, ref, opts)
end
defp push_live_patch(state, nil), do: state
defp push_live_patch(state, opts), do: push(state, "live_patch", opts)

# Redirects are pushed as a standalone event when there is no client ref to
# acknowledge; otherwise they ride inside the reply payload.
defp push_redirect(state, opts, nil = _ref) do
  push(state, "redirect", opts)
end

defp push_redirect(state, opts, ref) do
  reply(state, ref, :ok, %{redirect: opts})
end

# When a diff ack is pending, the live redirect is delivered on that ack's
# reply instead of a separate push.
defp push_live_redirect(state, opts, nil = _ref, {_diff, ack_ref}) do
  reply(state, ack_ref, :ok, %{live_redirect: opts})
end

defp push_live_redirect(state, opts, nil = _ref, _pending_diff_ack) do
  push(state, "live_redirect", opts)
end

defp push_live_redirect(state, opts, ref, _pending_diff_ack) do
  reply(state, ref, :ok, %{live_redirect: opts})
end

defp push_noop(state, nil = _ref), do: state
defp push_noop(state, ref), do: reply(state, ref, :ok, %{})

# Empty diffs still acknowledge a pending ref; non-empty diffs are either
# pushed as an event or sent as the reply payload.
defp push_diff(state, diff, ref) when diff == %{}, do: push_noop(state, ref)
defp push_diff(state, diff, nil = _ref), do: push(state, "diff", diff)
defp push_diff(state, diff, ref), do: reply(state, ref, :ok, %{diff: diff})

# Signs the flash into redirect opts so the destination view can restore it.
defp copy_flash(_state, flash, opts) when flash == %{},
  do: opts

defp copy_flash(state, flash, opts),
  do: Map.put(opts, :flash, Utils.sign_flash(state.socket.endpoint, flash))

# Returns the pending redirect, if any; otherwise renders a diff.
defp maybe_diff(%{socket: socket} = state, force?) do
  socket.redirected || render_diff(state, socket, force?)
end

# Renders the view (when forced or when the assigns changed), merges in
# component diffs, and clears the socket's change tracking afterwards.
defp render_diff(state, socket, force?) do
  {socket, diff, components} =
    if force? or Utils.changed?(socket) do
      rendered = Utils.to_rendered(socket, socket.view)
      Diff.render(socket, rendered, state.components)
    else
      {socket, %{}, state.components}
    end

  diff = Diff.render_private(socket, diff)
  {:diff, diff, %{state | socket: Utils.clear_changed(socket), components: components}}
end

# A `{ref, extra}` tuple merges extra payload into the reply (used for diff
# acks that also carry a redirect).
defp reply(state, {ref, extra}, status, payload) do
  reply(state, ref, status, Map.merge(payload, extra))
end

defp reply(state, ref, status, payload) when is_binary(ref) do
  reply_ref = {state.socket.transport_pid, state.serializer, state.topic, ref, state.join_ref}
  Phoenix.Channel.reply(reply_ref, {status, payload})
  state
end

# Serializes and pushes a channel message straight to the transport process.
defp push(state, event, payload) do
  message = %Message{
    topic: state.topic,
    event: event,
    payload: payload,
    join_ref: state.join_ref
  }

  send(state.socket.transport_pid, state.serializer.encode!(message))
  state
end
## Mount

# Handles the channel join: builds the session from the join params,
# authorizes it against the requested URL, and proceeds to the verified
# mount. Failures reply with "stale"/"unauthorized" and stop the channel.
defp mount(
       %{"session" => session_token, "params" => %{"module" => module_name} = join_params} = params,
       from,
       phx_socket
     ) do
  %Phoenix.Socket{endpoint: endpoint, topic: topic} = phx_socket

  case build_session(module_name, join_params) do
    {:ok, %Session{} = verified} ->
      %Phoenix.Socket{private: %{connect_info: connect_info}} = phx_socket

      # NOTE(review): only a `%{}` connect_info is handled; any non-map value
      # would raise a CaseClauseError here — confirm that is intended.
      case connect_info do
        %{} ->
          case authorize_session(verified, endpoint, params) do
            {:ok, %Session{} = new_verified, route, url} ->
              verified_mount(new_verified, route, url, params, from, phx_socket, connect_info)

            {:error, :unauthorized} ->
              GenServer.reply(from, {:error, %{reason: "unauthorized"}})
              {:stop, :shutdown, :no_state}

            {:error, _reason} ->
              GenServer.reply(from, {:error, %{reason: "stale"}})
              {:stop, :shutdown, :no_state}
          end
      end

    {:error, _reason} ->
      GenServer.reply(from, {:error, %{reason: "stale"}})
      {:stop, :shutdown, :no_state}
  end
end

# A join without a session payload cannot be mounted.
defp mount(%{}, from, phx_socket) do
  Logger.error("Mounting #{phx_socket.topic} failed because no session was provided")
  GenServer.reply(from, {:error, %{reason: "stale"}})
  {:stop, :shutdown, :no_session}
end
# Builds a Session directly from the client-provided module name instead of a
# signed session token; `id` and `live_session_vsn` are fixed placeholders.
#
# SECURITY NOTE(review): `String.to_atom/1` creates a new atom from
# client-controlled input whenever an unknown module name is sent. Atoms are
# never garbage collected, so repeated joins with fabricated names can
# exhaust the atom table. Confirm the input is trusted upstream, or prefer
# `String.to_existing_atom/1` with appropriate error handling.
defp build_session(module_name, session) do
  module = String.to_existing_atom("Elixir.#{module_name}")

  {:ok,
   %LiveElement.Session{
     assign_new: [],
     flash: %{},
     id: "phx-Fq6e4BAJtMAudgHH",
     live_session_name: :default,
     live_session_vsn: 1_634_418_391_780_893_900,
     parent_pid: nil,
     redirected?: false,
     root_pid: nil,
     root_view: module,
     session: session,
     view: module
   }}
end
# Resolves the initial flash for a (re)connect, in priority order.
defp verify_flash(endpoint, %Session{} = verified, flash_token, connect_params) do
  cond do
    # flash_token is given by the client on live_redirects and has higher priority.
    flash_token ->
      Utils.verify_flash(endpoint, flash_token)

    # verified.flash comes from the disconnected render, therefore we only want
    # to load it if we are not inside a live redirect and if it is our first mount.
    not verified.redirected? && connect_params["_mounts"] == 0 && verified.flash ->
      verified.flash

    true ->
      %{}
  end
end
# Performs the actual mount once the session has been verified/authorized:
# configures the socket, restores CSRF state, calls the view's mount and
# handle_params, and replies to the join via `reply_mount/4`.
defp verified_mount(%Session{} = verified, route, url, params, from, phx_socket, connect_info) do
  %Session{
    id: id,
    view: view,
    root_view: root_view,
    parent_pid: parent,
    root_pid: root_pid,
    session: verified_user_session,
    assign_new: assign_new,
    router: router
  } = verified

  # Make sure the view is loaded. Otherwise if the first request
  # ever is a LiveView connection, the view won't be loaded and
  # the mount/handle_params callbacks won't be invoked as they
  # are optional, leading to errors.
  config = view.__live__()
  live_session_on_mount = load_live_session_on_mount(route)
  lifecycle = lifecycle(config, live_session_on_mount)

  %Phoenix.Socket{
    endpoint: endpoint,
    transport_pid: transport_pid
  } = phx_socket

  # Optional parameter handling
  connect_params = params["params"]

  # Optional verified parts
  flash = verify_flash(endpoint, verified, params["flash"], connect_params)
  socket_session = connect_info[:session] || %{}

  Process.monitor(transport_pid)
  load_csrf_token(endpoint, socket_session)

  # Record the caller chain ("$callers"): either the pid supplied in the join
  # payload or, by default, the transport process.
  case params do
    %{"caller" => {pid, _}} when is_pid(pid) -> Process.put(:"$callers", [pid])
    _ -> Process.put(:"$callers", [transport_pid])
  end

  socket = %Socket{
    endpoint: endpoint,
    view: view,
    transport_pid: transport_pid,
    parent_pid: parent,
    root_pid: root_pid || self(),
    id: id,
    router: router
  }

  # Routed mounts use the route's params/uri/action; unrouted mounts get the
  # @not_mounted_at_router sentinel.
  {params, host_uri, action} =
    case route do
      %Route{} = route ->
        {route.params, route.uri, route.action}

      nil ->
        {@not_mounted_at_router, @not_mounted_at_router, nil}
    end

  merged_session = Map.merge(socket_session, verified_user_session)

  case mount_private(parent, root_view, assign_new, connect_params, connect_info, lifecycle) do
    {:ok, mount_priv} ->
      socket = Utils.configure_socket(socket, mount_priv, action, flash, host_uri)

      socket
      |> Utils.maybe_call_live_view_mount!(view, params, merged_session)
      |> build_state(phx_socket)
      |> maybe_call_mount_handle_params(router, url, params)
      |> reply_mount(from, verified, route)

    {:error, :noproc} ->
      # The parent LiveView died before this child could mount.
      GenServer.reply(from, {:error, %{reason: "stale"}})
      {:stop, :shutdown, :no_state}
  end
end
# Restores CSRF state from the session token into this process, so the
# LiveView process can participate in CSRF-protected requests.
defp load_csrf_token(endpoint, socket_session) do
  if token = socket_session["_csrf_token"] do
    state = Plug.CSRFProtection.dump_state_from_session(token)
    secret_key_base = endpoint.config(:secret_key_base)
    Plug.CSRFProtection.load_state(secret_key_base, state)
  end
end

# on_mount hooks configured on the route's live_session, if any.
defp load_live_session_on_mount(%Route{live_session: %{extra: %{on_mount: hooks}}}), do: hooks
defp load_live_session_on_mount(_), do: []

# live_session on_mount hooks run before the view's own mount hooks.
defp lifecycle(%{lifecycle: lifecycle}, []), do: lifecycle

defp lifecycle(%{lifecycle: lifecycle}, on_mount) do
  %{lifecycle | mount: on_mount ++ lifecycle.mount}
end
# Builds the socket's private mount data. The root (parentless) variant
# starts with empty inherited assigns; the child variant synchronously
# fetches the parent's assigns and disables any live layout.
defp mount_private(nil, root_view, assign_new, connect_params, connect_info, lifecycle) do
  {:ok,
   %{
     connect_params: connect_params,
     connect_info: connect_info,
     assign_new: {%{}, assign_new},
     lifecycle: lifecycle,
     root_view: root_view,
     __changed__: %{}
   }}
end

defp mount_private(parent, root_view, assign_new, connect_params, connect_info, lifecycle) do
  case sync_with_parent(parent, assign_new) do
    {:ok, parent_assigns} ->
      # Child live views always ignore the layout on `:use`.
      {:ok,
       %{
         connect_params: connect_params,
         connect_info: connect_info,
         assign_new: {parent_assigns, assign_new},
         phoenix_live_layout: false,
         lifecycle: lifecycle,
         root_view: root_view,
         __changed__: %{}
       }}

    {:error, :noproc} ->
      {:error, :noproc}
  end
end

# Asks the parent LiveView process for the assigns named in `assign_new`,
# monitoring it so a dead parent is reported as `{:error, :noproc}`.
defp sync_with_parent(parent, assign_new) do
  _ref = Process.monitor(parent)

  try do
    GenServer.call(parent, {@prefix, :child_mount, self(), assign_new})
  catch
    :exit, {:noproc, _} -> {:error, :noproc}
  end
end
# Adds the route's container tag/attrs to the mount reply, but only after a
# session redirect (the client must re-render the container in that case).
defp put_container(%Session{} = session, %Route{} = route, %{} = diff) do
  if container = session.redirected? && Route.container(route) do
    {tag, attrs} = container
    Map.put(diff, :container, [tag, Enum.into(attrs, %{})])
  else
    diff
  end
end

defp put_container(%Session{}, nil = _route, %{} = diff), do: diff

# Translates the mount outcome into the join reply: success (optionally with
# a live patch) keeps the channel alive; redirects reply with an error and
# stop the channel.
defp reply_mount(result, from, %Session{} = session, route) do
  case result do
    {:ok, diff, :mount, new_state} ->
      reply = put_container(session, route, %{rendered: diff})
      GenServer.reply(from, {:ok, reply})
      {:noreply, post_verified_mount(new_state)}

    {:ok, diff, {:live_patch, opts}, new_state} ->
      reply = put_container(session, route, %{rendered: diff, live_patch: opts})
      GenServer.reply(from, {:ok, reply})
      {:noreply, post_verified_mount(new_state)}

    {:live_redirect, opts, new_state} ->
      GenServer.reply(from, {:error, %{live_redirect: opts}})
      {:stop, :shutdown, new_state}

    {:redirect, opts, new_state} ->
      GenServer.reply(from, {:error, %{redirect: opts}})
      {:stop, :shutdown, new_state}
  end
end
# Initial channel state wrapping the LiveView socket.
defp build_state(%Socket{} = lv_socket, %Phoenix.Socket{} = phx_socket) do
  %{
    join_ref: phx_socket.join_ref,
    serializer: phx_socket.serializer,
    socket: lv_socket,
    topic: phx_socket.topic,
    components: Diff.new_components(),
    upload_names: %{},
    upload_pids: %{}
  }
end

# Only root-relative paths are accepted; the host comes from the mount URI.
defp build_uri(%{socket: socket}, "/" <> _ = to) do
  URI.to_string(%{socket.host_uri | path: to})
end

# Drops mount-only private data once the join reply has been sent.
defp post_verified_mount(%{socket: socket} = state) do
  %{state | socket: Utils.post_mount_prune(socket)}
end

defp assign_action(socket, action) do
  LiveElement.assign(socket, :live_action, action)
end
# Registers upload entries carried on an event payload (`"uploads"` maps
# upload refs to entry lists). Errors still return the updated socket so the
# failure can be rendered.
defp maybe_update_uploads(%Socket{} = socket, %{"uploads" => uploads} = payload) do
  cid = payload["cid"]

  Enum.reduce(uploads, socket, fn {ref, entries}, acc ->
    upload_conf = Upload.get_upload_by_ref!(acc, ref)

    case Upload.put_entries(acc, upload_conf, entries, cid) do
      {:ok, new_socket} -> new_socket
      {:error, _error_resp, %Socket{} = new_socket} -> new_socket
    end
  end)
end

defp maybe_update_uploads(%Socket{} = socket, %{} = _payload), do: socket

# Pairs an upload-channel process with its entry. The GenServer reply happens
# inside the socket write so that success and failure share the same diff
# path (`write_socket/4` decides whether to push a diff).
defp register_entry_upload(state, from, info) do
  %{channel_pid: pid, ref: ref, entry_ref: entry_ref, cid: cid} = info

  write_socket(state, cid, nil, fn socket, _ ->
    conf = Upload.get_upload_by_ref!(socket, ref)

    case Upload.register_entry_upload(socket, conf, pid, entry_ref) do
      {:ok, new_socket, entry} ->
        reply = %{max_file_size: entry.client_size, chunk_timeout: conf.chunk_timeout}
        GenServer.reply(from, {:ok, reply})
        new_state = put_upload_pid(state, pid, ref, entry_ref, cid)
        {new_socket, {:ok, nil, new_state}}

      {:error, reason} ->
        GenServer.reply(from, {:error, reason})
        {socket, :error}
    end
  end)
end

# Runs `func` against the root socket (cid = nil) or the component's socket.
defp read_socket(state, nil = _cid, func) do
  func.(state.socket, nil)
end

defp read_socket(state, cid, func) do
  %{socket: socket, components: components} = state
  Diff.read_component(socket, cid, components, func)
end
# If :error is returned, the socket must not change,
# otherwise we need to call push_diff on all cases.
#
# Root-socket variant: the regular change handling renders and pushes.
defp write_socket(state, nil, ref, fun) do
  {new_socket, return} = fun.(state.socket, nil)

  case return do
    {:ok, ref_reply, new_state} ->
      {:noreply, new_state} = handle_changed(new_state, new_socket, ref_reply)
      new_state

    :error ->
      push_noop(state, ref)
  end
end

# Component variant: the write happens inside the component tree and the
# resulting component diff is pushed explicitly.
defp write_socket(state, cid, ref, fun) do
  %{socket: socket, components: components} = state

  {diff, new_components, return} =
    case Diff.write_component(socket, cid, components, fun) do
      {_diff, _new_components, _return} = triplet -> triplet
      :error -> {%{}, components, :error}
    end

  case return do
    {:ok, ref_reply, new_state} ->
      new_state = %{new_state | components: new_components}
      push_diff(new_state, diff, ref_reply)

    :error ->
      push_noop(state, ref)
  end
end
# Deletes components by cid, cancelling uploads owned by any deleted
# component and dropping their upload names. Returns the list of deleted
# cids together with the updated state.
defp delete_components(state, cids) do
  # Set of cids that currently own an allowed upload.
  upload_cids = Enum.into(state.upload_names, MapSet.new(), fn {_name, {_ref, cid}} -> cid end)

  Enum.flat_map_reduce(cids, state, fn cid, acc ->
    {deleted_cids, new_components} = Diff.delete_component(cid, acc.components)

    canceled_confs =
      deleted_cids
      |> Enum.filter(fn deleted_cid -> deleted_cid in upload_cids end)
      |> Enum.flat_map(fn deleted_cid ->
        read_socket(acc, deleted_cid, fn c_socket, _ ->
          {_new_c_socket, canceled_confs} = Upload.maybe_cancel_uploads(c_socket)
          canceled_confs
        end)
      end)

    new_state =
      Enum.reduce(canceled_confs, acc, fn conf, acc -> drop_upload_name(acc, conf.name) end)

    {deleted_cids, %{new_state | components: new_components}}
  end)
end

# An upload name may only be allowed once across the whole LiveView. The same
# {ref, cid} re-registering is fine; a different component raises.
defp ensure_unique_upload_name!(state, conf) do
  upload_ref = conf.ref
  cid = conf.cid

  case Map.fetch(state.upload_names, conf.name) do
    {:ok, {^upload_ref, ^cid}} ->
      :ok

    :error ->
      :ok

    {:ok, {_existing_ref, existing_cid}} ->
      raise RuntimeError, """
      existing upload for #{conf.name} already allowed in another component (#{existing_cid})
      If you want to allow simultaneous uploads across different components,
      pass a unique upload name to allow_upload/3
      """
  end
end
# Authorizes the session against the URL the client joined with:
# "redirect" joins must resolve to a route the session may redirect to;
# "url" joins only resolve the route for the main (root) session.
defp authorize_session(%Session{} = session, endpoint, %{"redirect" => url}) do
  if redir_route = session_route(session, endpoint, url) do
    case Session.authorize_root_redirect(session, redir_route) do
      {:ok, %Session{} = new_session} -> {:ok, new_session, redir_route, url}
      {:error, :unauthorized} = err -> err
    end
  else
    {:error, :unauthorized}
  end
end

defp authorize_session(%Session{} = session, endpoint, %{"url" => url}) do
  if Session.main?(session) do
    {:ok, session, session_route(session, endpoint, url), url}
  else
    {:ok, session, _route = nil, _url = nil}
  end
end

# No URL in the params (e.g. a child join): nothing to authorize.
defp authorize_session(%Session{} = session, _endpoint, %{} = _params) do
  {:ok, session, _route = nil, _url = nil}
end

# Resolves a URL to an internal live route, or nil when it doesn't match.
defp session_route(%Session{} = session, endpoint, url) do
  case Route.live_link_info(endpoint, session.router, url) do
    {:internal, %Route{} = route} -> route
    _ -> nil
  end
end
end
| 31.738848 | 130 | 0.634154 |
ff6584f3fe58863a64653eff953180c1c92fdbc1 | 245 | ex | Elixir | lib/farmbot/http/helpers.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 2 | 2018-08-01T23:07:52.000Z | 2018-10-17T12:49:21.000Z | lib/farmbot/http/helpers.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | null | null | null | lib/farmbot/http/helpers.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | [
"MIT"
] | 1 | 2017-07-22T21:51:14.000Z | 2017-07-22T21:51:14.000Z | defmodule Farmbot.HTTP.Helpers do
@moduledoc """
Small helpers shared across the HTTP modules.
"""

@doc """
Guard-compatible check that `number` is an HTTP 2xx (success) status code.
"""
defmacro is_2xx(number) do
  quote do: unquote(number) > 199 and unquote(number) < 300
end
end
| 16.333333 | 53 | 0.628571 |
ff65a521c1845b926a196df2ffe84c1b0c8b4624 | 2,441 | ex | Elixir | lib/guardian/db/token.ex | phaibin/guardian_db | c8706b140f0aca689fdfe3c96d7fbd79ee52cdda | [
"MIT"
] | null | null | null | lib/guardian/db/token.ex | phaibin/guardian_db | c8706b140f0aca689fdfe3c96d7fbd79ee52cdda | [
"MIT"
] | null | null | null | lib/guardian/db/token.ex | phaibin/guardian_db | c8706b140f0aca689fdfe3c96d7fbd79ee52cdda | [
"MIT"
] | null | null | null | defmodule Guardian.DB.Token do
@moduledoc """
A very simple model for storing tokens generated by `Guardian`.
"""
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query, only: [where: 3]
alias Guardian.DB.Token
@primary_key {:jti, :string, autogenerate: false}
@allowed_fields ~w(jti typ aud iss sub exp jwt claims)a
schema "virtual: token" do
field(:typ, :string)
field(:aud, :string)
field(:iss, :string)
field(:sub, :string)
field(:exp, :integer)
field(:jwt, :string)
field(:claims, :map)
timestamps()
end
@doc """
Find one token by matching jti and aud.
"""
def find_by_claims(claims) do
jti = Map.get(claims, "jti")
aud = Map.get(claims, "aud")
query =
query_schema()
|> where([token], token.jti == ^jti and token.aud == ^aud)
|> Map.put(:prefix, prefix())
Guardian.DB.repo().one(query)
end
@doc """
Create a new token based on the JWT and decoded claims.
"""
def create(%{"sub" => sub} = claims, jwt) do
if sub do
query_schema()
|> where([token], token.sub == ^sub)
|> Guardian.DB.repo().delete_all(prefix: prefix())
end
prepared_claims =
claims
|> Map.put("jwt", jwt)
|> Map.put("claims", claims)
%Token{}
|> Ecto.put_meta(source: schema_name())
|> Ecto.put_meta(prefix: prefix())
|> cast(prepared_claims, @allowed_fields)
|> Guardian.DB.repo().insert()
end
@doc """
Purge any tokens that are expired. This should be done periodically to keep
your DB table clean of clutter.
"""
def purge_expired_tokens do
timestamp = Guardian.timestamp()
query_schema()
|> where([token], token.exp < ^timestamp)
|> Guardian.DB.repo().delete_all(prefix: prefix())
end
@doc false
def query_schema do
{schema_name(), Token}
end
@doc false
def schema_name do
:guardian
|> Application.fetch_env!(Guardian.DB)
|> Keyword.get(:schema_name, "guardian_tokens")
end
@doc false
def prefix do
:guardian
|> Application.fetch_env!(Guardian.DB)
|> Keyword.get(:prefix, nil)
end
@doc false
def destroy_token(nil, claims, jwt), do: {:ok, {claims, jwt}}
def destroy_token(model, claims, jwt) do
case Guardian.DB.repo().delete(model) do
{:error, _} -> {:error, :could_not_revoke_token}
nil -> {:error, :could_not_revoke_token}
_ -> {:ok, {claims, jwt}}
end
end
end
| 23.028302 | 77 | 0.623105 |
ff65be9d88579ab710eafc39eedfe46468ed74d2 | 2,471 | ex | Elixir | lib/uro/v_sekai.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | 1 | 2022-01-11T04:05:39.000Z | 2022-01-11T04:05:39.000Z | lib/uro/v_sekai.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | 35 | 2021-02-10T08:18:57.000Z | 2021-05-06T17:19:50.000Z | lib/uro/v_sekai.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | null | null | null | defmodule Uro.VSekai do
@moduledoc """
The VSekai context.
"""
import Ecto.Query, warn: false
alias Uro.Repo
alias Uro.VSekai.Shard
@doc """
Returns the time in seconds a shard before is considered stale.
"""
def shard_freshness_time_in_seconds, do: 30
@doc """
Returns the list of shards.
## Examples
iex> list_shards()
[%Shard{}, ...]
"""
def list_shards do
Shard
|> Repo.all
|> Repo.preload(user: [:user])
end
@doc """
Returns a list of shards last modified within the shard freshness time.
## Examples
iex> list_fresh_shards()
[%Shard{}, ...]
"""
def list_fresh_shards do
stale_timestamp = DateTime.add(DateTime.utc_now(), -shard_freshness_time_in_seconds(), :second)
Repo.all(from m in Shard, where: m.updated_at > ^stale_timestamp, preload: [:user])
end
@doc """
Gets a single shard.
Raises `Ecto.NoResultsError` if the Shard does not exist.
## Examples
iex> get_shard!(123)
%Shard{}
iex> get_shard!(456)
** (Ecto.NoResultsError)
"""
def get_shard!(id) do
Shard
|> Repo.get!(id)
|> Repo.preload(user: [:user]) end
@doc """
Creates a shard.
## Examples
iex> create_shard(%{field: value})
{:ok, %Shard{}}
iex> create_shard(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_shard(attrs \\ %{}) do
%Shard{}
|> Shard.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a shard.
## Examples
iex> update_shard(shard, %{field: new_value})
{:ok, %Shard{}}
iex> update_shard(shard, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_shard(%Shard{} = shard, attrs) do
shard
|> Shard.changeset(attrs)
|> Repo.update(force: true)
end
@doc """
Deletes a shard.
## Examples
iex> delete_shard(shard)
{:ok, %Shard{}}
iex> delete_shard(shard)
{:error, %Ecto.Changeset{}}
"""
def delete_shard(%Shard{} = shard) do
Repo.delete(shard)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking shard changes.
## Examples
iex> change_shard(shard)
%Ecto.Changeset{source: %Shard{}}
"""
def change_shard(%Shard{} = shard) do
Shard.changeset(shard, %{})
end
def get_shard_by_address(address) when is_nil(address) do
nil
end
def get_shard_by_address(address) do
Repo.get_by(Shard, address: address)
end
end
| 17.905797 | 99 | 0.604614 |
ff65c5f160881ae71177200cea0f6ac2e487ac46 | 1,144 | ex | Elixir | lib/config_cat/in_memory_cache.ex | kianmeng/elixir-sdk | 89fb73f6249f82ac8415246519c17ad4ade54760 | [
"MIT"
] | 14 | 2020-10-15T09:15:12.000Z | 2022-03-18T19:42:28.000Z | lib/config_cat/in_memory_cache.ex | kianmeng/elixir-sdk | 89fb73f6249f82ac8415246519c17ad4ade54760 | [
"MIT"
] | 54 | 2020-10-14T05:08:21.000Z | 2021-05-28T13:00:22.000Z | lib/config_cat/in_memory_cache.ex | kianmeng/elixir-sdk | 89fb73f6249f82ac8415246519c17ad4ade54760 | [
"MIT"
] | 5 | 2020-10-13T10:24:17.000Z | 2021-11-30T17:47:11.000Z | defmodule ConfigCat.InMemoryCache do
@moduledoc false
use GenServer
alias ConfigCat.ConfigCache
@type option :: {:cache_key, ConfigCache.key()}
@type options :: [option]
@behaviour ConfigCache
@spec start_link(options()) :: GenServer.on_start()
def start_link(options) do
name =
options
|> Keyword.fetch!(:cache_key)
|> name_from_cache_key()
GenServer.start_link(__MODULE__, :empty, name: name)
end
@impl ConfigCache
def get(cache_key) do
GenServer.call(name_from_cache_key(cache_key), :get)
end
@impl ConfigCache
def set(cache_key, value) do
GenServer.call(name_from_cache_key(cache_key), {:set, value})
end
defp name_from_cache_key(cache_key) do
String.to_atom(cache_key)
end
@impl GenServer
def init(state) do
{:ok, state}
end
@impl GenServer
def handle_call(:get, _from, :empty = state) do
{:reply, {:error, :not_found}, state}
end
@impl GenServer
def handle_call(:get, _from, state) do
{:reply, {:ok, state}, state}
end
@impl GenServer
def handle_call({:set, value}, _from, _state) do
{:reply, :ok, value}
end
end
| 20.070175 | 65 | 0.678322 |
ff660e7078f465730697ced25b4c5e09e37e88e4 | 2,532 | exs | Elixir | test/agent1_test.exs | ianrumford/elixir_state_map_api | a30388f4677e3c09e3536a5b10082ba459cf6522 | [
"MIT"
] | null | null | null | test/agent1_test.exs | ianrumford/elixir_state_map_api | a30388f4677e3c09e3536a5b10082ba459cf6522 | [
"MIT"
] | null | null | null | test/agent1_test.exs | ianrumford/elixir_state_map_api | a30388f4677e3c09e3536a5b10082ba459cf6522 | [
"MIT"
] | null | null | null | defmodule ExampleAgent1 do
# generate wrappers for three submaps
use Amlapio, agent: [:buttons, :menus, :checkboxes]
# generate *only* get, put and pop wrappers for the state itself and
# use a namer function to name the wrappers "agent_state_get",
# "agent_state_put" and "agent_state_pop"
use Amlapio, agent: nil, funs: [:get, :put, :pop],
namer: fn _map_name, fun_name ->
["agent_state_", to_string(fun_name)] |> Enum.join |> String.to_atom
end
# create the agent; note the default state is an empty map
def start_link(state \\ %{}) do
Agent.start_link(fn -> state end)
end
end
defmodule ExampleAgent1Test do
  use ExUnit.Case

  require ExampleAgent1

  # Exercises the generated submap wrappers (buttons_get / menus_put /
  # checkboxes_pop) against a pre-populated agent.
  test "agent_submap1" do
    buttons_state = %{1 => :button_back, 2 => :button_next, 3 => :button_exit}
    menus_state = %{menu_a: 1, menu_b: :two, menu_c: "tre"}
    checkboxes_state = %{checkbox_yesno: [:yes, :no], checkbox_bool: [true, false]}

    agent_state = %{buttons: buttons_state,
                    menus: menus_state, checkboxes: checkboxes_state}

    # create the agent
    {:ok, agent} = ExampleAgent1.start_link(agent_state)

    # some usage examples
    assert :button_back == agent |> ExampleAgent1.buttons_get(1)

    assert :button_default ==
      agent |> ExampleAgent1.buttons_get(99, :button_default)

    assert agent == agent |> ExampleAgent1.menus_put(:menu_d, 42)
    assert menus_state |> Map.put(:menu_d, 42) == agent |> ExampleAgent1.agent_state_get(:menus)

    assert {[:yes, :no], agent} ==
      agent |> ExampleAgent1.checkboxes_pop(:checkbox_yesno)
  end

  # Exercises the whole-state wrappers named via the custom namer
  # (agent_state_get / agent_state_put / agent_state_pop).
  test "agent_state1" do
    buttons_state = %{1 => :button_back, 2 => :button_next, 3 => :button_exit}
    menus_state = %{menu_a: 1, menu_b: :two, menu_c: "tre"}
    checkboxes_state = %{checkbox_yesno: [:yes, :no], checkbox_bool: [true, false]}
    agent_state = %{buttons: buttons_state, menus: menus_state, checkboxes: checkboxes_state}

    # create the agent
    {:ok, agent} = ExampleAgent1.start_link(agent_state)

    # some usage examples
    assert buttons_state == agent |> ExampleAgent1.agent_state_get(:buttons)

    assert agent == agent |> ExampleAgent1.agent_state_put(:menus, 42)
    assert 42 == agent |> Agent.get(fn s -> s end) |> Map.get(:menus)

    assert {checkboxes_state, agent} == agent |> ExampleAgent1.agent_state_pop(:checkboxes)
    assert %{buttons: buttons_state, menus: 42} == agent |> Agent.get(fn s -> s end)

    assert 99 == agent |> ExampleAgent1.agent_state_get(:some_other_key, 99)
  end
end
| 32.883117 | 96 | 0.682859 |
ff66197dbc0ee168b6ea8269e9f3e23032f07fdc | 662 | exs | Elixir | apps/definition_schedule/test/schedule_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_schedule/test/schedule_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/definition_schedule/test/schedule_test.exs | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | defmodule ScheduleTest do
use Checkov
doctest Schedule
describe "new/1" do
data_test "validates #{field} against bad input" do
input = put_in(%{}, [field], value)
assert {:error, [%{input: value, path: [field]} | _]} = Schedule.new(input)
where [
[:field, :value],
[:version, "1"],
[:id, ""],
[:dataset_id, ""],
[:dataset_id, nil],
[:subset_id, ""],
[:subset_id, nil],
[:cron, ""],
[:cron, nil],
[:compaction_cron, ""],
[:compaction_cron, nil],
[:extract, %{}],
[:transform, nil],
[:load, nil]
]
end
end
end
| 22.827586 | 81 | 0.480363 |
ff6640ad5d52556f31945c718dff88517142b94d | 53,865 | exs | Elixir | lib/elixir/test/elixir/typespec_test.exs | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/typespec_test.exs | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | 1 | 2021-10-21T08:22:30.000Z | 2021-10-21T08:22:30.000Z | lib/elixir/test/elixir/typespec_test.exs | samgaw/elixir | 92e8263102d95281a5aa7850b747636805f13fc9 | [
"Apache-2.0"
] | null | null | null | Code.require_file("test_helper.exs", __DIR__)
# Holds tests for both Kernel.Typespec and Code.Typespec
defmodule TypespecTest do
use ExUnit.Case, async: true
alias TypespecTest.TypespecSample
defstruct [:hello]
  # Compiles `block` inside a throwaway module (TypespecSample) and returns
  # the resulting BEAM bytecode, so each test can inspect the typespec chunks
  # embedded by the compiler. The module is deleted and purged immediately so
  # successive tests can reuse the same module name without conflicts.
  defmacrop test_module(do: block) do
    quote do
      # defmodule returns {:module, name, bytecode, result}; keep the bytecode.
      {:module, _, bytecode, _} =
        defmodule TypespecSample do
          unquote(block)
        end

      # Remove the compiled module from the code server and purge old code.
      :code.delete(TypespecSample)
      :code.purge(TypespecSample)

      bytecode
    end
  end
defp types(bytecode) do
bytecode
|> Code.Typespec.fetch_types()
|> elem(1)
|> Enum.sort()
end
@skip_specs [__info__: 1]
defp specs(bytecode) do
bytecode
|> Code.Typespec.fetch_specs()
|> elem(1)
|> Enum.reject(fn {sign, _} -> sign in @skip_specs end)
|> Enum.sort()
end
defp callbacks(bytecode) do
bytecode
|> Code.Typespec.fetch_callbacks()
|> elem(1)
|> Enum.sort()
end
describe "Kernel.Typespec errors" do
test "invalid type specification" do
assert_raise CompileError, ~r"invalid type specification: my_type = 1", fn ->
test_module do
@type my_type = 1
end
end
end
test "unexpected expression in typespec" do
assert_raise CompileError, ~r"unexpected expression in typespec: \"foobar\"", fn ->
test_module do
@type my_type :: "foobar"
end
end
end
test "invalid function specification" do
assert_raise CompileError, ~r"invalid type specification: \"not a spec\"", fn ->
test_module do
@spec "not a spec"
end
end
assert_raise CompileError, ~r"invalid type specification: 1 :: 2", fn ->
test_module do
@spec 1 :: 2
end
end
end
test "undefined type" do
assert_raise CompileError, ~r"type foo/0 undefined", fn ->
test_module do
@type omg :: foo
end
end
assert_raise CompileError, ~r"type foo/2 undefined", fn ->
test_module do
@type omg :: foo(atom, integer)
end
end
assert_raise CompileError, ~r"type bar/0 undefined", fn ->
test_module do
@spec foo(bar, integer) :: {atom, integer}
def foo(var1, var2), do: {var1, var2}
end
end
assert_raise CompileError, ~r"type foo/0 undefined", fn ->
test_module do
@type omg :: __MODULE__.foo()
end
end
end
test "redefined type" do
assert_raise CompileError,
~r"type foo/0 is already defined in test/elixir/typespec_test.exs:110",
fn ->
test_module do
@type foo :: atom
@type foo :: integer
end
end
assert_raise CompileError,
~r"type foo/2 is already defined in test/elixir/typespec_test.exs:120",
fn ->
test_module do
@type foo :: atom
@type foo(var1, var2) :: {var1, var2}
@type foo(x, y) :: {x, y}
end
end
assert_raise CompileError,
~r"type foo/0 is already defined in test/elixir/typespec_test.exs:129",
fn ->
test_module do
@type foo :: atom
@typep foo :: integer
end
end
end
test "type variable unused (singleton type variable)" do
assert_raise CompileError, ~r"type variable x is used only once", fn ->
test_module do
@type foo(x) :: integer
end
end
end
test "type variable starting with underscore" do
test_module do
assert @type(foo(_hello) :: integer) == :ok
end
end
test "type variable named _" do
assert_raise CompileError, ~r"type variable '_' is invalid", fn ->
test_module do
@type foo(_) :: integer
end
end
assert_raise CompileError, ~r"type variable '_' is invalid", fn ->
test_module do
@type foo(_, _) :: integer
end
end
end
test "spec for undefined function" do
assert_raise CompileError, ~r"spec for undefined function omg/0", fn ->
test_module do
@spec omg :: atom
end
end
end
test "spec variable used only once (singleton type variable)" do
assert_raise CompileError, ~r"type variable x is used only once", fn ->
test_module do
@spec foo(x, integer) :: integer when x: var
def foo(x, y), do: x + y
end
end
end
test "invalid optional callback" do
assert_raise CompileError, ~r"invalid optional callback :foo", fn ->
test_module do
@optional_callbacks :foo
end
end
end
test "unknown optional callback" do
assert_raise CompileError, ~r"unknown callback foo/1 given as optional callback", fn ->
test_module do
@optional_callbacks foo: 1
end
end
end
test "repeated optional callback" do
message = ~r"foo/1 has been specified as optional callback more than once"
assert_raise CompileError, message, fn ->
test_module do
@callback foo(:ok) :: :ok
@optional_callbacks foo: 1, foo: 1
end
end
end
test "behaviour_info/1 explicitly defined alongside @callback/@macrocallback" do
message = ~r"cannot define @callback attribute for foo/1 when behaviour_info/1"
assert_raise CompileError, message, fn ->
test_module do
@callback foo(:ok) :: :ok
def behaviour_info(_), do: []
end
end
message = ~r"cannot define @macrocallback attribute for foo/1 when behaviour_info/1"
assert_raise CompileError, message, fn ->
test_module do
@macrocallback foo(:ok) :: :ok
def behaviour_info(_), do: []
end
end
end
test "default is not supported" do
assert_raise ArgumentError, fn ->
test_module do
@callback hello(num \\ 0 :: integer) :: integer
end
end
assert_raise ArgumentError, fn ->
test_module do
@callback hello(num :: integer \\ 0) :: integer
end
end
assert_raise ArgumentError, fn ->
test_module do
@macrocallback hello(num \\ 0 :: integer) :: Macro.t()
end
end
assert_raise ArgumentError, fn ->
test_module do
@macrocallback hello(num :: integer \\ 0) :: Macro.t()
end
end
assert_raise ArgumentError, fn ->
test_module do
@spec hello(num \\ 0 :: integer) :: integer
end
end
assert_raise ArgumentError, fn ->
test_module do
@spec hello(num :: integer \\ 0) :: integer
end
end
end
test "@spec shows readable error message when return type is missing" do
message = ~r"type specification missing return type: my_fun\(integer\)"
assert_raise CompileError, message, fn ->
test_module do
@spec my_fun(integer)
end
end
end
end
describe "Kernel.Typespec definitions" do
test "typespec declarations return :ok" do
test_module do
def foo(), do: nil
assert @type(foo :: any()) == :ok
assert @typep(foop :: any()) == :ok
assert @spec(foo() :: nil) == :ok
assert @opaque(my_type :: atom) == :ok
assert @callback(foo(foop) :: integer) == :ok
assert @macrocallback(foo(integer) :: integer) == :ok
end
end
test "@type with a single type" do
bytecode =
test_module do
@type my_type :: term
end
assert [type: {:my_type, {:type, _, :term, []}, []}] = types(bytecode)
end
test "@type with an atom/alias" do
bytecode =
test_module do
@type foo :: :foo
@type bar :: Bar
end
assert [
type: {:bar, {:atom, _, Bar}, []},
type: {:foo, {:atom, _, :foo}, []}
] = types(bytecode)
end
test "@type with an integer" do
bytecode =
test_module do
@type pos :: 10
@type neg :: -10
end
assert [
type: {:neg, {:op, _, :-, {:integer, _, 10}}, []},
type: {:pos, {:integer, _, 10}, []}
] = types(bytecode)
end
test "@type with a tuple" do
bytecode =
test_module do
@type tup :: tuple()
@type one :: {123}
end
assert [
type: {:one, {:type, _, :tuple, [{:integer, _, 123}]}, []},
type: {:tup, {:type, _, :tuple, :any}, []}
] = types(bytecode)
end
test "@type with a remote type" do
bytecode =
test_module do
@type my_type :: Remote.Some.type()
@type my_type_arg :: Remote.type(integer)
end
assert [type: my_type, type: my_type_arg] = types(bytecode)
assert {:my_type, type, []} = my_type
assert {:remote_type, _, [{:atom, _, Remote.Some}, {:atom, _, :type}, []]} = type
assert {:my_type_arg, type, []} = my_type_arg
assert {:remote_type, _, args} = type
assert [{:atom, _, Remote}, {:atom, _, :type}, [{:type, _, :integer, []}]] = args
end
test "@type with a binary" do
bytecode =
test_module do
@type bin :: binary
@type empty :: <<>>
@type size :: <<_::3>>
@type unit :: <<_::_*8>>
@type size_and_unit :: <<_::3, _::_*8>>
end
assert [
type: {:bin, {:type, _, :binary, []}, []},
type: {:empty, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 0}]}, []},
type: {:size, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 0}]}, []},
type:
{:size_and_unit, {:type, _, :binary, [{:integer, _, 3}, {:integer, _, 8}]}, []},
type: {:unit, {:type, _, :binary, [{:integer, _, 0}, {:integer, _, 8}]}, []}
] = types(bytecode)
end
test "@type with invalid binary spec" do
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::3*8>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::atom>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::integer>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::(-4)>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::3, _::_*atom>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::3, _::_*(-8)>>
end
end
assert_raise CompileError, ~r"invalid binary specification", fn ->
test_module do
@type my_type :: <<_::3, _::_*257>>
end
end
end
test "@type with a range op" do
bytecode =
test_module do
@type range1 :: 1..10
@type range2 :: -1..1
end
assert [
{:type, {:range1, {:type, _, :range, range1_args}, []}},
{:type, {:range2, {:type, _, :range, range2_args}, []}}
] = types(bytecode)
assert [{:integer, _, 1}, {:integer, _, 10}] = range1_args
assert [{:op, _, :-, {:integer, _, 1}}, {:integer, _, 1}] = range2_args
end
test "@type with invalid range" do
assert_raise CompileError, ~r"invalid range specification", fn ->
test_module do
@type my_type :: atom..10
end
end
end
test "@type with a keyword map" do
bytecode =
test_module do
@type my_type :: %{hello: :world}
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :map, [arg]} = type
assert {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]} = arg
end
test "@type with a map" do
bytecode =
test_module do
@type my_type :: %{required(:a) => :b, optional(:c) => :d}
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :map, [arg1, arg2]} = type
assert {:type, _, :map_field_exact, [{:atom, _, :a}, {:atom, _, :b}]} = arg1
assert {:type, _, :map_field_assoc, [{:atom, _, :c}, {:atom, _, :d}]} = arg2
end
test "@type with a struct" do
bytecode =
test_module do
defstruct hello: nil, other: nil
@type my_type :: %TypespecSample{hello: :world}
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :map, [struct, arg1, arg2]} = type
assert {:type, _, :map_field_exact, struct_args} = struct
assert [{:atom, _, :__struct__}, {:atom, _, TypespecSample}] = struct_args
assert {:type, _, :map_field_exact, [{:atom, _, :hello}, {:atom, _, :world}]} = arg1
assert {:type, _, :map_field_exact, [{:atom, _, :other}, {:type, _, :term, []}]} = arg2
end
@fields Enum.map(10..42, &{:"f#{&1}", :ok})
test "@type with a large struct" do
bytecode =
test_module do
defstruct unquote(@fields)
@type my_type :: %TypespecSample{unquote_splicing(@fields)}
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :map, [struct, arg1, arg2 | _]} = type
assert {:type, _, :map_field_exact, struct_args} = struct
assert [{:atom, _, :__struct__}, {:atom, _, TypespecSample}] = struct_args
assert {:type, _, :map_field_exact, [{:atom, _, :f10}, {:atom, _, :ok}]} = arg1
assert {:type, _, :map_field_exact, [{:atom, _, :f11}, {:atom, _, :ok}]} = arg2
end
test "@type with struct does not @enforce_keys" do
bytecode =
test_module do
@enforce_keys [:other]
defstruct hello: nil, other: nil
@type my_type :: %TypespecSample{hello: :world}
end
assert [type: {:my_type, _type, []}] = types(bytecode)
end
test "@type with undefined struct" do
assert_raise CompileError, ~r"ThisModuleDoesNotExist.__struct__/0 is undefined", fn ->
test_module do
@type my_type :: %ThisModuleDoesNotExist{}
end
end
assert_raise CompileError, ~r"cannot access struct TypespecTest.TypespecSample", fn ->
test_module do
@type my_type :: %TypespecSample{}
end
end
end
test "@type with a struct with undefined field" do
assert_raise CompileError, ~r"undefined field :no_field on struct TypespecSample", fn ->
test_module do
defstruct [:hello, :eric]
@type my_type :: %TypespecSample{no_field: :world}
end
end
end
test "@type when overriding Elixir built-in" do
assert_raise CompileError, ~r"type struct/0 is a built-in type", fn ->
test_module do
@type struct :: :oops
end
end
end
test "@type when overriding Erlang built-in" do
assert_raise CompileError, ~r"type list/0 is a built-in type", fn ->
test_module do
@type list :: :oops
end
end
end
test "@type with public record" do
bytecode =
test_module do
require Record
Record.defrecord(:timestamp, date: 1, time: 2)
@type my_type :: record(:timestamp, time: :foo)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :tuple, [timestamp, term, foo]} = type
assert {:atom, 0, :timestamp} = timestamp
assert {:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]} = term
assert {:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]} = foo
end
test "@type with private record" do
bytecode =
test_module do
require Record
Record.defrecordp(:timestamp, date: 1, time: 2)
@type my_type :: record(:timestamp, time: :foo)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :tuple, args} = type
assert [
{:atom, 0, :timestamp},
{:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]},
{:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]}
] = args
end
test "@type with named record" do
bytecode =
test_module do
require Record
Record.defrecord(:timestamp, :my_timestamp, date: 1, time: 2)
@type my_type :: record(:timestamp, time: :foo)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :tuple, [my_timestamp, term, _foo]} = type
assert {:atom, 0, :my_timestamp} = my_timestamp
assert {:ann_type, 0, [{:var, 0, :date}, {:type, 0, :term, []}]} = term
assert {:ann_type, 0, [{:var, 0, :time}, {:atom, 0, :foo}]}
end
test "@type with undefined record" do
assert_raise CompileError, ~r"unknown record :this_record_does_not_exist", fn ->
test_module do
@type my_type :: record(:this_record_does_not_exist, [])
end
end
end
test "@type with a record with undefined field" do
assert_raise CompileError, ~r"undefined field no_field on record :timestamp", fn ->
test_module do
require Record
Record.defrecord(:timestamp, date: 1, time: 2)
@type my_type :: record(:timestamp, no_field: :foo)
end
end
end
test "@type with a record which declares the name as the type `atom` rather than an atom literal" do
assert_raise CompileError, ~r"expected the record name to be an atom literal", fn ->
test_module do
@type my_type :: record(atom, field: :foo)
end
end
end
test "@type can be named record" do
bytecode =
test_module do
@type record :: binary
@spec foo?(record) :: boolean
def foo?(_), do: true
end
assert [type: {:record, {:type, _, :binary, []}, []}] = types(bytecode)
end
test "@type with an invalid map notation" do
assert_raise CompileError, ~r"invalid map specification", fn ->
test_module do
@type content :: %{atom | String.t() => term}
end
end
end
test "@type with list shortcuts" do
bytecode =
test_module do
@type my_type :: []
@type my_type1 :: [integer]
@type my_type2 :: [integer, ...]
end
assert [
type: {:my_type, {:type, _, nil, []}, []},
type: {:my_type1, {:type, _, :list, [{:type, _, :integer, []}]}, []},
type: {:my_type2, {:type, _, :nonempty_list, [{:type, _, :integer, []}]}, []}
] = types(bytecode)
end
test "@type with a fun" do
bytecode =
test_module do
@type my_type :: (... -> any)
end
assert [type: {:my_type, {:type, _, :fun, []}, []}] = types(bytecode)
end
test "@type with a fun with multiple arguments and return type" do
bytecode =
test_module do
@type my_type :: (integer, integer -> integer)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :fun, [args, return_type]} = type
assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = args
assert {:type, _, :integer, []} = return_type
end
test "@type with a fun with no arguments and return type" do
bytecode =
test_module do
@type my_type :: (() -> integer)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :fun, [{:type, _, :product, []}, {:type, _, :integer, []}]} = type
end
test "@type with a fun with any arity and return type" do
bytecode =
test_module do
@type my_type :: (... -> integer)
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :fun, [{:type, _, :any}, {:type, _, :integer, []}]} = type
end
test "@type with a union" do
bytecode =
test_module do
@type my_type :: integer | charlist | atom
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :union, [integer, charlist, atom]} = type
assert {:type, _, :integer, []} = integer
assert {:remote_type, _, [{:atom, _, :elixir}, {:atom, _, :charlist}, []]} = charlist
assert {:type, _, :atom, []} = atom
end
test "@type with keywords" do
bytecode =
test_module do
@type my_type :: [first: integer, step: integer, last: integer]
end
assert [type: {:my_type, type, []}] = types(bytecode)
assert {:type, _, :list, [{:type, _, :union, union_types}]} = type
assert [
{:type, _, :tuple, [{:atom, _, :first}, {:type, _, :integer, []}]},
{:type, _, :tuple, [{:atom, _, :step}, {:type, _, :integer, []}]},
{:type, _, :tuple, [{:atom, _, :last}, {:type, _, :integer, []}]}
] = union_types
end
test "@type with parameters" do
bytecode =
test_module do
@type my_type(x) :: x
@type my_type1(x) :: list(x)
@type my_type2(x, y) :: {x, y}
end
assert [
type: {:my_type, {:var, _, :x}, [{:var, _, :x}]},
type: {:my_type1, {:type, _, :list, [{:var, _, :x}]}, [{:var, _, :x}]},
type: {:my_type2, my_type2, [{:var, _, :x}, {:var, _, :y}]}
] = types(bytecode)
assert {:type, _, :tuple, [{:var, _, :x}, {:var, _, :y}]} = my_type2
end
test "@type with annotations" do
bytecode =
test_module do
@type my_type :: named :: integer
@type my_type1 :: (a :: integer -> integer)
end
assert [type: {:my_type, my_type, []}, type: {:my_type1, my_type1, []}] = types(bytecode)
assert {:ann_type, _, [{:var, _, :named}, {:type, _, :integer, []}]} = my_type
assert {:type, _, :fun, [fun_args, fun_return]} = my_type1
assert {:type, _, :product, [{:ann_type, _, [a, {:type, _, :integer, []}]}]} = fun_args
assert {:var, _, :a} = a
assert {:type, _, :integer, []} = fun_return
end
test "@type unquote fragment" do
quoted =
quote unquote: false do
name = :my_type
type = :foo
@type unquote(name)() :: unquote(type)
end
bytecode =
test_module do
Module.eval_quoted(__MODULE__, quoted)
end
assert [type: {:my_type, {:atom, _, :foo}, []}] = types(bytecode)
end
test "@type with module attributes" do
bytecode =
test_module do
@keyword Keyword
@type kw :: @keyword.t
@type kw(value) :: @keyword.t(value)
end
assert [type: {:kw, kw, _}, type: {:kw, kw_with_value, [{:var, _, :value}]}] =
types(bytecode)
assert {:remote_type, _, [{:atom, _, Keyword}, {:atom, _, :t}, []]} = kw
assert {:remote_type, _, kw_with_value_args} = kw_with_value
assert [{:atom, _, Keyword}, {:atom, _, :t}, [{:var, _, :value}]] = kw_with_value_args
end
test "@type with a reserved signature" do
assert_raise CompileError,
~r"type required\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@type required(arg) :: any()
end
end
assert_raise CompileError,
~r"type optional\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@type optional(arg) :: any()
end
end
assert_raise CompileError,
~r"type required\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@typep required(arg) :: any()
end
end
assert_raise CompileError,
~r"type optional\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@typep optional(arg) :: any()
end
end
assert_raise CompileError,
~r"type required\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@opaque required(arg) :: any()
end
end
assert_raise CompileError,
~r"type optional\/1 is a reserved type and it cannot be defined",
fn ->
test_module do
@opaque optional(arg) :: any()
end
end
end
test "invalid remote @type with module attribute that does not evaluate to a module" do
assert_raise CompileError, ~r/\(@foo is "bar"\)/, fn ->
test_module do
@foo "bar"
@type t :: @foo.t
end
end
end
test "defines_type?" do
test_module do
@type my_type :: tuple
@type my_type(a) :: [a]
assert Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 0})
assert Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 1})
refute Kernel.Typespec.defines_type?(__MODULE__, {:my_type, 2})
end
end
test "spec_to_callback/2" do
bytecode =
test_module do
@spec foo() :: term()
def foo(), do: :ok
Kernel.Typespec.spec_to_callback(__MODULE__, {:foo, 0})
end
assert specs(bytecode) == callbacks(bytecode)
end
test "@opaque" do
bytecode =
test_module do
@opaque my_type(x) :: x
end
assert [opaque: {:my_type, {:var, _, :x}, [{:var, _, :x}]}] = types(bytecode)
end
test "@spec" do
bytecode =
test_module do
def my_fun1(x), do: x
def my_fun2(), do: :ok
def my_fun3(x, y), do: {x, y}
def my_fun4(x), do: x
@spec my_fun1(integer) :: integer
@spec my_fun2() :: integer
@spec my_fun3(integer, integer) :: {integer, integer}
@spec my_fun4(x :: integer) :: integer
end
assert [my_fun1, my_fun2, my_fun3, my_fun4] = specs(bytecode)
assert {{:my_fun1, 1}, [{:type, _, :fun, args}]} = my_fun1
assert [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}] = args
assert {{:my_fun2, 0}, [{:type, _, :fun, args}]} = my_fun2
assert [{:type, _, :product, []}, {:type, _, :integer, []}] = args
assert {{:my_fun3, 2}, [{:type, _, :fun, [arg1, arg2]}]} = my_fun3
assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = arg1
assert {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]} = arg2
assert {{:my_fun4, 1}, [{:type, _, :fun, args}]} = my_fun4
assert [x, {:type, _, :integer, []}] = args
assert {:type, _, :product, [{:ann_type, _, [{:var, _, :x}, {:type, _, :integer, []}]}]} = x
end
test "@spec with vars matching built-ins" do
bytecode =
test_module do
def my_fun1(x), do: x
def my_fun2(x), do: x
@spec my_fun1(tuple) :: tuple
@spec my_fun2(tuple) :: tuple when tuple: {integer, integer}
end
assert [my_fun1, my_fun2] = specs(bytecode)
assert {{:my_fun1, 1}, [{:type, _, :fun, args}]} = my_fun1
assert [{:type, _, :product, [{:type, _, :tuple, :any}]}, {:type, _, :tuple, :any}] = args
assert {{:my_fun2, 1}, [{:type, _, :bounded_fun, args}]} = my_fun2
assert [type, _] = args
assert {:type, _, :fun, [{:type, _, :product, [{:var, _, :tuple}]}, {:var, _, :tuple}]} =
type
end
test "@spec with guards" do
bytecode =
test_module do
def my_fun1(x), do: x
@spec my_fun1(x) :: boolean when x: integer
def my_fun2(x), do: x
@spec my_fun2(x) :: x when x: var
def my_fun3(_x, y), do: y
@spec my_fun3(x, y) :: y when y: x, x: var
end
assert [my_fun1, my_fun2, my_fun3] = specs(bytecode)
assert {{:my_fun1, 1}, [{:type, _, :bounded_fun, args}]} = my_fun1
assert [{:type, _, :fun, [product, {:type, _, :boolean, []}]}, constraints] = args
assert {:type, _, :product, [{:var, _, :x}]} = product
assert [{:type, _, :constraint, subtype}] = constraints
assert [{:atom, _, :is_subtype}, [{:var, _, :x}, {:type, _, :integer, []}]] = subtype
assert {{:my_fun2, 1}, [{:type, _, :fun, args}]} = my_fun2
assert [{:type, _, :product, [{:var, _, :x}]}, {:var, _, :x}] = args
assert {{:my_fun3, 2}, [{:type, _, :bounded_fun, args}]} = my_fun3
assert [{:type, _, :fun, fun_type}, [{:type, _, :constraint, constraint_type}]] = args
assert [{:type, _, :product, [{:var, _, :x}, {:var, _, :y}]}, {:var, _, :y}] = fun_type
assert [{:atom, _, :is_subtype}, [{:var, _, :y}, {:var, _, :x}]] = constraint_type
end
test "@type, @opaque, and @typep as module attributes" do
defmodule TypeModuleAttributes do
@type type1 :: boolean
@opaque opaque1 :: boolean
@typep typep1 :: boolean
def type1, do: @type
def opaque1, do: @opaque
def typep1, do: @typep
@type type2 :: atom
@type type3 :: pid
@opaque opaque2 :: atom
@opaque opaque3 :: pid
@typep typep2 :: atom
def type2, do: @type
def opaque2, do: @opaque
def typep2, do: @typep
# Avoid unused warnings
@spec foo(typep1) :: typep2
def foo(_x), do: :ok
end
assert [
{:type, {:"::", _, [{:type1, _, _}, {:boolean, _, _}]}, {TypeModuleAttributes, _}}
] = TypeModuleAttributes.type1()
assert [
{:type, {:"::", _, [{:type3, _, _}, {:pid, _, _}]}, {TypeModuleAttributes, _}},
{:type, {:"::", _, [{:type2, _, _}, {:atom, _, _}]}, {TypeModuleAttributes, _}},
{:type, {:"::", _, [{:type1, _, _}, {:boolean, _, _}]}, {TypeModuleAttributes, _}}
] = TypeModuleAttributes.type2()
assert [
{:opaque, {:"::", _, [{:opaque1, _, _}, {:boolean, _, _}]},
{TypeModuleAttributes, _}}
] = TypeModuleAttributes.opaque1()
assert [
{:opaque, {:"::", _, [{:opaque3, _, _}, {:pid, _, _}]}, {TypeModuleAttributes, _}},
{:opaque, {:"::", _, [{:opaque2, _, _}, {:atom, _, _}]},
{TypeModuleAttributes, _}},
{:opaque, {:"::", _, [{:opaque1, _, _}, {:boolean, _, _}]},
{TypeModuleAttributes, _}}
] = TypeModuleAttributes.opaque2()
assert [
{:typep, {:"::", _, [{:typep1, _, _}, {:boolean, _, _}]},
{TypeModuleAttributes, _}}
] = TypeModuleAttributes.typep1()
assert [
{:typep, {:"::", _, [{:typep2, _, _}, {:atom, _, _}]}, {TypeModuleAttributes, _}},
{:typep, {:"::", _, [{:typep1, _, _}, {:boolean, _, _}]},
{TypeModuleAttributes, _}}
] = TypeModuleAttributes.typep2()
after
:code.delete(TypeModuleAttributes)
:code.purge(TypeModuleAttributes)
end
test "@spec, @callback, and @macrocallback as module attributes" do
defmodule SpecModuleAttributes do
@callback callback1 :: integer
@macrocallback macrocallback1 :: integer
@spec spec1 :: boolean
def spec1, do: @spec
@callback callback2 :: var when var: boolean
@macrocallback macrocallback2 :: var when var: boolean
@spec spec2 :: atom
def spec2, do: @spec
@spec spec3 :: pid
def spec3, do: :ok
def spec4, do: @spec
def callback, do: @callback
def macrocallback, do: @macrocallback
end
assert [
{:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
] = SpecModuleAttributes.spec1()
assert [
{:spec, {:"::", _, [{:spec2, _, _}, {:atom, _, _}]}, {SpecModuleAttributes, _}},
{:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
] = SpecModuleAttributes.spec2()
assert [
{:spec, {:"::", _, [{:spec3, _, _}, {:pid, _, _}]}, {SpecModuleAttributes, _}},
{:spec, {:"::", _, [{:spec2, _, _}, {:atom, _, _}]}, {SpecModuleAttributes, _}},
{:spec, {:"::", _, [{:spec1, _, _}, {:boolean, _, _}]}, {SpecModuleAttributes, _}}
] = SpecModuleAttributes.spec4()
assert [
{:callback,
{:when, _,
[{:"::", _, [{:callback2, _, _}, {:var, _, _}]}, [var: {:boolean, _, _}]]},
{SpecModuleAttributes, _}},
{:callback, {:"::", _, [{:callback1, _, _}, {:integer, _, _}]},
{SpecModuleAttributes, _}}
] = SpecModuleAttributes.callback()
assert [
{:macrocallback,
{:when, _,
[{:"::", _, [{:macrocallback2, _, _}, {:var, _, _}]}, [var: {:boolean, _, _}]]},
{SpecModuleAttributes, _}},
{:macrocallback, {:"::", _, [{:macrocallback1, _, _}, {:integer, _, _}]},
{SpecModuleAttributes, _}}
] = SpecModuleAttributes.macrocallback()
after
:code.delete(SpecModuleAttributes)
:code.purge(SpecModuleAttributes)
end
test "@callback" do
bytecode =
test_module do
@callback my_fun(integer) :: integer
@callback my_fun(list) :: list
@callback my_fun() :: integer
@callback my_fun(integer, integer) :: {integer, integer}
end
assert [my_fun_0, my_fun_1, my_fun_2] = callbacks(bytecode)
assert {{:my_fun, 0}, [{:type, _, :fun, args}]} = my_fun_0
assert [{:type, _, :product, []}, {:type, _, :integer, []}] = args
assert {{:my_fun, 1}, [clause1, clause2]} = my_fun_1
assert {:type, _, :fun, args1} = clause1
assert [{:type, _, :product, [{:type, _, :integer, []}]}, {:type, _, :integer, []}] = args1
assert {:type, _, :fun, args2} = clause2
assert [{:type, _, :product, [{:type, _, :list, []}]}, {:type, _, :list, []}] = args2
assert {{:my_fun, 2}, [{:type, _, :fun, [args_type, return_type]}]} = my_fun_2
assert {:type, _, :product, [{:type, _, :integer, []}, {:type, _, :integer, []}]} =
args_type
assert {:type, _, :tuple, [{:type, _, :integer, []}, {:type, _, :integer, []}]} =
return_type
end
test "block handling" do
bytecode =
test_module do
@spec foo((() -> [integer])) :: integer
def foo(_), do: 1
end
assert [{{:foo, 1}, [{:type, _, :fun, [args, return]}]}] = specs(bytecode)
assert {:type, _, :product, [{:type, _, :fun, fun_args}]} = args
assert [{:type, _, :product, []}, {:type, _, :list, [{:type, _, :integer, []}]}] = fun_args
assert {:type, _, :integer, []} = return
end
end
describe "Code.Typespec" do
test "type_to_quoted" do
quoted =
Enum.sort([
quote(do: @type(tuple(arg) :: {:tuple, arg})),
quote(do: @type(with_ann() :: t :: atom())),
quote(do: @type(a_tuple() :: tuple())),
quote(do: @type(empty_tuple() :: {})),
quote(do: @type(one_tuple() :: {:foo})),
quote(do: @type(two_tuple() :: {:foo, :bar})),
quote(do: @type(custom_tuple() :: tuple(:foo))),
quote(do: @type(imm_type_1() :: 1)),
quote(do: @type(imm_type_2() :: :foo)),
quote(do: @type(simple_type() :: integer())),
quote(do: @type(param_type(p) :: [p])),
quote(do: @type(union_type() :: integer() | binary() | boolean())),
quote(do: @type(binary_type1() :: <<_::_*8>>)),
quote(do: @type(binary_type2() :: <<_::3>>)),
quote(do: @type(binary_type3() :: <<_::3, _::_*8>>)),
quote(do: @type(tuple_type() :: {integer()})),
quote(
do: @type(ftype() :: (() -> any()) | (() -> integer()) | (integer() -> integer()))
),
quote(do: @type(cl() :: charlist())),
quote(do: @type(st() :: struct())),
quote(do: @type(ab() :: as_boolean(term()))),
quote(do: @type(kw() :: keyword())),
quote(do: @type(kwt() :: keyword(term()))),
quote(do: @type(vaf() :: (... -> any()))),
quote(do: @type(rng() :: 1..10)),
quote(do: @type(opts() :: [first: integer(), step: integer(), last: integer()])),
quote(do: @type(ops() :: {+1, -1})),
quote(do: @type(map(arg) :: {:map, arg})),
quote(do: @type(a_map() :: map())),
quote(do: @type(empty_map() :: %{})),
quote(do: @type(my_map() :: %{hello: :world})),
quote(do: @type(my_req_map() :: %{required(0) => :foo})),
quote(do: @type(my_opt_map() :: %{optional(0) => :foo})),
quote(do: @type(my_struct() :: %TypespecTest{hello: :world})),
quote(do: @type(custom_map() :: map(:foo))),
quote(do: @type(list1() :: list())),
quote(do: @type(list2() :: [0])),
quote(do: @type(list3() :: [...])),
quote(do: @type(list4() :: [0, ...])),
quote(do: @type(nil_list() :: []))
])
bytecode =
test_module do
Module.eval_quoted(__MODULE__, quoted)
end
types = types(bytecode)
Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} ->
ast = Code.Typespec.type_to_quoted(type)
assert Macro.to_string(quote(do: @type(unquote(ast)))) == Macro.to_string(definition)
end)
end
test "type_to_quoted for paren_type" do
type = {:my_type, {:paren_type, 0, [{:type, 0, :integer, []}]}, []}
assert Code.Typespec.type_to_quoted(type) ==
{:"::", [], [{:my_type, [], []}, {:integer, [line: 0], []}]}
end
# Round-trip check: each quoted @spec is compiled into a throwaway module,
# the stored Erlang spec forms are converted back via spec_to_quoted/2, and
# the re-quoted output must print identically to the source (compared with
# Macro.to_string/1; both sides sorted so ordering is irrelevant).
test "spec_to_quoted" do
  quoted =
    Enum.sort([
      quote(do: @spec(foo() :: integer())),
      quote(do: @spec(foo() :: union())),
      quote(do: @spec(foo() :: union(integer()))),
      quote(do: @spec(foo() :: truly_union())),
      quote(do: @spec(foo(union()) :: union())),
      quote(do: @spec(foo(union(integer())) :: union(integer()))),
      quote(do: @spec(foo(truly_union()) :: truly_union())),
      quote(do: @spec(foo(atom()) :: integer() | [{}])),
      quote(do: @spec(foo(arg) :: integer() when [arg: integer()])),
      quote(do: @spec(foo(arg) :: arg when [arg: var])),
      quote(do: @spec(foo(arg :: atom()) :: atom()))
    ])

  bytecode =
    test_module do
      # Local types/functions referenced by the specs above.
      @type union :: any()
      @type union(t) :: t
      @type truly_union :: list | map | union
      def foo(), do: 1
      def foo(arg), do: arg
      Module.eval_quoted(__MODULE__, quote(do: (unquote_splicing(quoted))))
    end

  # Re-quote every stored spec for :foo, regardless of arity.
  specs =
    Enum.flat_map(specs(bytecode), fn {{_, _}, specs} ->
      Enum.map(specs, fn spec ->
        quote(do: @spec(unquote(Code.Typespec.spec_to_quoted(:foo, spec))))
      end)
    end)

  specs_with_quoted = specs |> Enum.sort() |> Enum.zip(quoted)

  Enum.each(specs_with_quoted, fn {spec, definition} ->
    assert Macro.to_string(spec) == Macro.to_string(definition)
  end)
end
# Struct types in specs are maps with a __struct__ key; this checks how
# spec_to_quoted/2 renders the different encodings (full struct literal,
# explicit __struct__ key, and unions thereof) back to source form.
test "spec_to_quoted with maps with __struct__ key" do
  defmodule A do
    defstruct [:key]
  end

  defmodule B do
    defstruct [:key]
  end

  bytecode =
    test_module do
      @spec single_struct(%A{}) :: :ok
      def single_struct(arg), do: {:ok, arg}

      @spec single_struct_key(%{__struct__: A}) :: :ok
      def single_struct_key(arg), do: {:ok, arg}

      @spec single_struct_key_type(%{__struct__: atom()}) :: :ok
      def single_struct_key_type(arg), do: {:ok, arg}

      @spec union_struct(%A{} | %B{}) :: :ok
      def union_struct(arg), do: {:ok, arg}

      @spec union_struct_key(%{__struct__: A | B}) :: :ok
      def union_struct_key(arg), do: {:ok, arg}

      @spec union_struct_key_type(%{__struct__: atom() | A | binary()}) :: :ok
      def union_struct_key_type(arg), do: {:ok, arg}
    end

  # One spec clause per function; destructure them in definition order.
  [
    {{:single_struct, 1}, [ast_single_struct]},
    {{:single_struct_key, 1}, [ast_single_struct_key]},
    {{:single_struct_key_type, 1}, [ast_single_struct_key_type]},
    {{:union_struct, 1}, [ast_union_struct]},
    {{:union_struct_key, 1}, [ast_union_struct_key]},
    {{:union_struct_key_type, 1}, [ast_union_struct_key_type]}
  ] = specs(bytecode)

  # A full struct literal is expanded with all of its fields (key: term()).
  assert Code.Typespec.spec_to_quoted(:single_struct, ast_single_struct)
         |> Macro.to_string() ==
           "single_struct(%TypespecTest.A{key: term()}) :: :ok"

  assert Code.Typespec.spec_to_quoted(:single_struct_key, ast_single_struct_key)
         |> Macro.to_string() ==
           "single_struct_key(%TypespecTest.A{}) :: :ok"

  assert Code.Typespec.spec_to_quoted(:single_struct_key_type, ast_single_struct_key_type)
         |> Macro.to_string() ==
           "single_struct_key_type(%{__struct__: atom()}) :: :ok"

  assert Code.Typespec.spec_to_quoted(:union_struct, ast_union_struct) |> Macro.to_string() ==
           "union_struct(%TypespecTest.A{key: term()} | %TypespecTest.B{key: term()}) :: :ok"

  assert Code.Typespec.spec_to_quoted(:union_struct_key, ast_union_struct_key)
         |> Macro.to_string() ==
           "union_struct_key(%{__struct__: TypespecTest.A | TypespecTest.B}) :: :ok"

  assert Code.Typespec.spec_to_quoted(:union_struct_key_type, ast_union_struct_key_type)
         |> Macro.to_string() ==
           "union_struct_key_type(%{__struct__: atom() | TypespecTest.A | binary()}) :: :ok"
end
# @type parameters must be plain variables; anything else (remote types,
# atoms, strings) is a compile-time error naming the offending arguments.
# NOTE(review): the "." in `String.t\(\)` is an unescaped regex wildcard
# here — harmless since it still matches the literal dot.
test "non-variables are given as arguments" do
  msg = ~r/The type one_bad_variable\/1 has an invalid argument\(s\): String.t\(\)/

  assert_raise CompileError, msg, fn ->
    test_module do
      @type one_bad_variable(String.t()) :: String.t()
    end
  end

  msg = ~r/The type two_bad_variables\/2 has an invalid argument\(s\): :ok, Enum.t\(\)/

  assert_raise CompileError, msg, fn ->
    test_module do
      @type two_bad_variables(:ok, Enum.t()) :: {:ok, []}
    end
  end

  # Mixing one valid variable with one invalid literal still raises.
  msg = ~r/The type one_bad_one_good\/2 has an invalid argument\(s\): \"\"/

  assert_raise CompileError, msg, fn ->
    test_module do
      @type one_bad_one_good(input1, "") :: {:ok, input1}
    end
  end
end

# fetch_types/1 and fetch_specs/1 return :error for unknown modules
# instead of raising.
test "retrieval invalid data" do
  assert Code.Typespec.fetch_types(Unknown) == :error
  assert Code.Typespec.fetch_specs(Unknown) == :error
end
# This is a test that implements all types specified in lib/elixir/pages/typespecs.md
# Exhaustive round-trip over every type documented in typespecs.md: compile
# them, convert the stored forms back with type_to_quoted/1, and compare the
# printed output with the source. A few types normalize to a different (but
# equivalent) textual form; those are special-cased in the final `case`.
test "documented types and their AST" do
  defmodule SomeStruct do
    defstruct [:key]
  end

  quoted =
    Enum.sort([
      ## Basic types
      quote(do: @type(basic_any() :: any())),
      quote(do: @type(basic_none() :: none())),
      quote(do: @type(basic_atom() :: atom())),
      quote(do: @type(basic_map() :: map())),
      quote(do: @type(basic_pid() :: pid())),
      quote(do: @type(basic_port() :: port())),
      quote(do: @type(basic_reference() :: reference())),
      quote(do: @type(basic_struct() :: struct())),
      quote(do: @type(basic_tuple() :: tuple())),

      # Numbers
      quote(do: @type(basic_float() :: float())),
      quote(do: @type(basic_integer() :: integer())),
      quote(do: @type(basic_neg_integer() :: neg_integer())),
      quote(do: @type(basic_non_neg_integer() :: non_neg_integer())),
      quote(do: @type(basic_pos_integer() :: pos_integer())),

      # Lists
      quote(do: @type(basic_list_type() :: list(integer()))),
      quote(do: @type(basic_nonempty_list_type() :: nonempty_list(integer()))),
      quote do
        @type basic_maybe_improper_list_type() :: maybe_improper_list(integer(), atom())
      end,
      quote do
        @type basic_nonempty_improper_list_type() :: nonempty_improper_list(integer(), atom())
      end,
      quote do
        @type basic_nonempty_maybe_improper_list_type() ::
                nonempty_maybe_improper_list(integer(), atom())
      end,

      ## Literals
      quote(do: @type(literal_atom() :: :atom)),
      quote(do: @type(literal_integer() :: 1)),
      quote(do: @type(literal_integers() :: 1..10)),
      quote(do: @type(literal_empty_bitstring() :: <<>>)),
      quote(do: @type(literal_size_0() :: <<_::0>>)),
      quote(do: @type(literal_unit_1() :: <<_::_*1>>)),
      quote(do: @type(literal_size_1_unit_8() :: <<_::100, _::_*256>>)),
      quote(do: @type(literal_function_arity_any() :: (... -> integer()))),
      quote(do: @type(literal_function_arity_0() :: (() -> integer()))),
      quote(do: @type(literal_function_arity_2() :: (integer(), atom() -> integer()))),
      quote(do: @type(literal_list_type() :: [integer()])),
      quote(do: @type(literal_empty_list() :: [])),
      quote(do: @type(literal_list_nonempty() :: [...])),
      quote(do: @type(literal_nonempty_list_type() :: [atom(), ...])),
      quote(do: @type(literal_keyword_list_fixed_key() :: [key: integer()])),
      quote(do: @type(literal_keyword_list_fixed_key2() :: [{:key, integer()}])),
      quote(do: @type(literal_keyword_list_type_key() :: [{binary(), integer()}])),
      quote(do: @type(literal_empty_map() :: %{})),
      quote(do: @type(literal_map_with_key() :: %{key: integer()})),
      quote(
        do: @type(literal_map_with_required_key() :: %{required(bitstring()) => integer()})
      ),
      quote(
        do: @type(literal_map_with_optional_key() :: %{optional(bitstring()) => integer()})
      ),
      quote(do: @type(literal_struct_all_fields_any_type() :: %SomeStruct{})),
      quote(do: @type(literal_struct_all_fields_key_type() :: %SomeStruct{key: integer()})),
      quote(do: @type(literal_empty_tuple() :: {})),
      quote(do: @type(literal_2_element_tuple() :: {1, 2})),

      ## Built-in types
      quote(do: @type(built_in_term() :: term())),
      quote(do: @type(built_in_arity() :: arity())),
      quote(do: @type(built_in_as_boolean() :: as_boolean(:t))),
      quote(do: @type(built_in_binary() :: binary())),
      quote(do: @type(built_in_bitstring() :: bitstring())),
      quote(do: @type(built_in_boolean() :: boolean())),
      quote(do: @type(built_in_byte() :: byte())),
      quote(do: @type(built_in_char() :: char())),
      quote(do: @type(built_in_charlist() :: charlist())),
      quote(do: @type(built_in_nonempty_charlist() :: nonempty_charlist())),
      quote(do: @type(built_in_fun() :: fun())),
      quote(do: @type(built_in_function() :: function())),
      quote(do: @type(built_in_identifier() :: identifier())),
      quote(do: @type(built_in_iodata() :: iodata())),
      quote(do: @type(built_in_iolist() :: iolist())),
      quote(do: @type(built_in_keyword() :: keyword())),
      quote(do: @type(built_in_keyword_value_type() :: keyword(:t))),
      quote(do: @type(built_in_list() :: list())),
      quote(do: @type(built_in_nonempty_list() :: nonempty_list())),
      quote(do: @type(built_in_maybe_improper_list() :: maybe_improper_list())),
      quote(
        do: @type(built_in_nonempty_maybe_improper_list() :: nonempty_maybe_improper_list())
      ),
      quote(do: @type(built_in_mfa() :: mfa())),
      quote(do: @type(built_in_module() :: module())),
      quote(do: @type(built_in_no_return() :: no_return())),
      quote(do: @type(built_in_node() :: node())),
      quote(do: @type(built_in_number() :: number())),
      quote(do: @type(built_in_struct() :: struct())),
      quote(do: @type(built_in_timeout() :: timeout())),

      ## Remote types
      quote(do: @type(remote_enum_t0() :: Enum.t())),
      quote(do: @type(remote_keyword_t1() :: Keyword.t(integer())))
    ])

  bytecode =
    test_module do
      Module.eval_quoted(__MODULE__, quoted)
    end

  types = types(bytecode)

  Enum.each(Enum.zip(types, quoted), fn {{:type, type}, definition} ->
    ast = Code.Typespec.type_to_quoted(type)
    ast_string = Macro.to_string(quote(do: @type(unquote(ast))))

    case type do
      # These cases do not translate directly to their own string version.
      {:basic_list_type, _, _} ->
        assert ast_string == "@type basic_list_type() :: [integer()]"

      {:basic_nonempty_list_type, _, _} ->
        assert ast_string == "@type basic_nonempty_list_type() :: [integer(), ...]"

      {:literal_empty_bitstring, _, _} ->
        assert ast_string == "@type literal_empty_bitstring() :: <<_::0>>"

      {:literal_keyword_list_fixed_key, _, _} ->
        assert ast_string == "@type literal_keyword_list_fixed_key() :: [{:key, integer()}]"

      {:literal_keyword_list_fixed_key2, _, _} ->
        assert ast_string == "@type literal_keyword_list_fixed_key2() :: [{:key, integer()}]"

      {:literal_struct_all_fields_any_type, _, _} ->
        assert ast_string ==
                 "@type literal_struct_all_fields_any_type() :: %TypespecTest.SomeStruct{key: term()}"

      {:literal_struct_all_fields_key_type, _, _} ->
        assert ast_string ==
                 "@type literal_struct_all_fields_key_type() :: %TypespecTest.SomeStruct{key: integer()}"

      {:built_in_fun, _, _} ->
        assert ast_string == "@type built_in_fun() :: (... -> any())"

      {:built_in_nonempty_list, _, _} ->
        assert ast_string == "@type built_in_nonempty_list() :: [...]"

      _ ->
        assert ast_string == Macro.to_string(definition)
    end
  end)
end
end
describe "behaviour_info" do
  # Exercises the behaviour_info/1 function generated for a module that
  # defines callbacks, macro callbacks, and optional callbacks.
  defmodule SampleCallbacks do
    @callback first(integer) :: integer
    @callback foo(atom(), binary) :: binary
    @callback bar(External.hello(), my_var :: binary) :: binary
    @callback guarded(my_var) :: my_var when my_var: binary
    @callback orr(atom | integer) :: atom
    @callback literal(123, {atom}, :foo, [integer], true) :: atom
    @macrocallback last(integer) :: Macro.t()
    @macrocallback last() :: atom
    @optional_callbacks bar: 2, last: 0
    @optional_callbacks first: 1
  end

  test "defines callbacks" do
    # Macro callbacks are stored as "MACRO-name" with arity + 1 (the extra
    # argument is the macro environment): last/0 -> "MACRO-last"/1, etc.
    expected_callbacks = [
      "MACRO-last": 1,
      "MACRO-last": 2,
      bar: 2,
      first: 1,
      foo: 2,
      guarded: 1,
      literal: 5,
      orr: 1
    ]

    assert Enum.sort(SampleCallbacks.behaviour_info(:callbacks)) == expected_callbacks
  end

  # Multiple @optional_callbacks attributes accumulate.
  test "defines optional callbacks" do
    assert Enum.sort(SampleCallbacks.behaviour_info(:optional_callbacks)) ==
             ["MACRO-last": 1, bar: 2, first: 1]
  end
end
@tag tmp_dir: true
# Types and specs written in plain Erlang must also be readable through
# Code.Typespec, with line metadata pointing at the Erlang source lines.
test "erlang module", c do
  erlc(c, :typespec_test_mod, """
  -module(typespec_test_mod).
  -export([f/1]).
  -export_type([t/1]).
  -type t(X) :: list(X).
  -spec f(X) -> X.
  f(X) -> X.
  """)

  [type: type] = types(:typespec_test_mod)
  # Line 5 of the Erlang source above is the -type declaration.
  line = 5

  assert Code.Typespec.type_to_quoted(type) ==
           {:"::", [], [{:t, [], [{:x, [line: line], nil}]}, [{:x, [line: line], nil}]]}

  [{{:f, 1}, [spec]}] = specs(:typespec_test_mod)
  # Line 7 is the function clause the -spec attaches to.
  line = 7

  assert Code.Typespec.spec_to_quoted(:f, spec) ==
           {:when, [line: line],
            [
              {:"::", [line: line],
               [{:f, [line: line], [{:x, [line: line], nil}]}, {:x, [line: line], nil}]},
              [x: {:var, [line: line], nil}]
            ]}
end
# Compiles `code` (Erlang source) for `module` under the test's tmp_dir,
# loads the resulting beam, and registers cleanup (purge/delete/rm) on exit.
defp erlc(context, module, code) do
  dir = context.tmp_dir
  src_path = Path.join([dir, "#{module}.erl"])
  src_path |> Path.dirname() |> File.mkdir_p!()
  File.write!(src_path, code)

  ebin_dir = Path.join(dir, "ebin")
  File.mkdir_p!(ebin_dir)

  # :debug_info is required so Code.Typespec can later read the types and
  # specs back out of the compiled chunk.
  {:ok, module} =
    :compile.file(String.to_charlist(src_path), [
      :debug_info,
      outdir: String.to_charlist(ebin_dir)
    ])

  true = Code.prepend_path(ebin_dir)
  {:module, ^module} = :code.load_file(module)

  ExUnit.Callbacks.on_exit(fn ->
    :code.purge(module)
    :code.delete(module)
    File.rm_rf!(dir)
  end)

  :ok
end
end
| 34.070209 | 109 | 0.527652 |
ff665773cb5c6214eba9f395f172aa947c18f128 | 2,193 | ex | Elixir | elixir_grpc_bench/lib/helloworld.pb.ex | scala-steward/grpc_bench | da158cd3551309857aa54feebab1d3bad3993f14 | [
"MIT"
] | null | null | null | elixir_grpc_bench/lib/helloworld.pb.ex | scala-steward/grpc_bench | da158cd3551309857aa54feebab1d3bad3993f14 | [
"MIT"
] | null | null | null | elixir_grpc_bench/lib/helloworld.pb.ex | scala-steward/grpc_bench | da158cd3551309857aa54feebab1d3bad3993f14 | [
"MIT"
] | null | null | null | defmodule Helloworld.Hello.Pet.Color do
# Proto3 enum `helloworld.Hello.Pet.Color` (Protobuf DSL definition —
# presumably generated by protoc-gen-elixir; do not change field numbers).
@moduledoc false
use Protobuf, enum: true, syntax: :proto3

# Unknown wire values decode to raw integers, hence `integer` in the union.
@type t :: integer | :BLACK | :WHITE | :BLUE | :RED | :YELLOW | :GREEN

field :BLACK, 0
field :WHITE, 1
field :BLUE, 2
field :RED, 3
field :YELLOW, 4
field :GREEN, 5
end
# Proto3 message `helloworld.Hello.Pet`: a named pet with a Color enum.
defmodule Helloworld.Hello.Pet do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          color: Helloworld.Hello.Pet.Color.t()
        }

  defstruct [:name, :color]

  field :name, 1, type: :string
  field :color, 2, type: Helloworld.Hello.Pet.Color, enum: true
end
# Proto3 message `helloworld.Hello`: exercises scalar fields, a oneof
# group, and a repeated nested message. Used as the benchmark payload.
defmodule Helloworld.Hello do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          # `choice` stores the active oneof branch as {field_atom, value}
          # (either {:c1, String.t()} or {:c2, boolean} per the fields below).
          choice: {atom, any},
          name: String.t(),
          d: float | :infinity | :negative_infinity | :nan,
          f: float | :infinity | :negative_infinity | :nan,
          b: boolean,
          n: integer,
          l: integer,
          pets: [Helloworld.Hello.Pet.t()]
        }

  defstruct [:choice, :name, :d, :f, :b, :n, :l, :pets]

  # oneof group 0, surfaced on the struct as the :choice field.
  oneof :choice, 0

  field :name, 1, type: :string
  field :d, 2, type: :double
  field :f, 3, type: :float
  field :b, 4, type: :bool
  field :n, 5, type: :int32
  field :l, 6, type: :int64
  field :c1, 7, type: :string, oneof: 0
  field :c2, 8, type: :bool, oneof: 0
  field :pets, 9, repeated: true, type: Helloworld.Hello.Pet
end
# Request wrapper around a single Hello message.
defmodule Helloworld.HelloRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          request: Helloworld.Hello.t() | nil
        }

  defstruct [:request]

  field :request, 1, type: Helloworld.Hello
end
# Reply wrapper around a single Hello message.
defmodule Helloworld.HelloReply do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          response: Helloworld.Hello.t() | nil
        }

  defstruct [:response]

  field :response, 1, type: Helloworld.Hello
end
# gRPC service definition: helloworld.Greeter with a single unary RPC.
defmodule Helloworld.Greeter.Service do
  @moduledoc false
  use GRPC.Service, name: "helloworld.Greeter"

  rpc :SayHello, Helloworld.HelloRequest, Helloworld.HelloReply
end
# Client stub for the Greeter service (call sites use Stub.say_hello/2,3).
defmodule Helloworld.Greeter.Stub do
  @moduledoc false
  use GRPC.Stub, service: Helloworld.Greeter.Service
end
| 23.836957 | 72 | 0.638395 |
ff665b5a2f5bf3b88a68bfb9a37633aa19b0855e | 1,962 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/backend_bucket_cdn_policy_cache_key_policy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/backend_bucket_cdn_policy_cache_key_policy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/backend_bucket_cdn_policy_cache_key_policy.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model (see the header note above); fields mirror the
# JSON resource one-to-one via GoogleApi.Gax.ModelBase.
defmodule GoogleApi.Compute.V1.Model.BackendBucketCdnPolicyCacheKeyPolicy do
  @moduledoc """
  Message containing what to include in the cache key for a request for Cloud CDN.

  ## Attributes

  *   `includeHttpHeaders` (*type:* `list(String.t)`, *default:* `nil`) - Allows HTTP request headers (by name) to be used in the cache key.
  *   `queryStringWhitelist` (*type:* `list(String.t)`, *default:* `nil`) - Names of query string parameters to include in cache keys. Default parameters are always included. '&' and '=' will be percent encoded and not treated as delimiters.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :includeHttpHeaders => list(String.t()) | nil,
          :queryStringWhitelist => list(String.t()) | nil
        }

  field(:includeHttpHeaders, type: :list)
  field(:queryStringWhitelist, type: :list)
end
# Poison JSON decode/encode support for the model, delegating to the
# generated helpers in the model module / GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.BackendBucketCdnPolicyCacheKeyPolicy do
  def decode(value, options) do
    GoogleApi.Compute.V1.Model.BackendBucketCdnPolicyCacheKeyPolicy.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.BackendBucketCdnPolicyCacheKeyPolicy do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.24 | 241 | 0.743119 |
ff6690003a02f663b1ce850eab1477621f7ab543 | 521 | ex | Elixir | lib/trento/domain/host/events/host_registered.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-22T16:59:34.000Z | 2022-03-22T16:59:34.000Z | lib/trento/domain/host/events/host_registered.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 24 | 2022-03-22T16:45:25.000Z | 2022-03-31T13:00:02.000Z | lib/trento/domain/host/events/host_registered.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-30T14:16:16.000Z | 2022-03-30T14:16:16.000Z | defmodule Trento.Domain.Events.HostRegistered do
@moduledoc """
This event is emitted when a host is registered.
"""
use Trento.Event
defevent do
field :host_id, Ecto.UUID
field :hostname, :string
field :ip_addresses, {:array, :string}
field :ssh_address, :string
field :agent_version, :string
field :cpu_count, :integer
field :total_memory_mb, :integer
field :socket_count, :integer
field :os_version, :string
field :heartbeat, Ecto.Enum, values: [:unknown]
end
end
| 24.809524 | 51 | 0.694818 |
ff6694e5121145000e31288f3a89a50fa2792d4a | 1,119 | exs | Elixir | config/config.exs | tsubery/seams | 99d08455a059770dabd2482dcf085d59a8d705bb | [
"Apache-2.0"
] | null | null | null | config/config.exs | tsubery/seams | 99d08455a059770dabd2482dcf085d59a8d705bb | [
"Apache-2.0"
] | null | null | null | config/config.exs | tsubery/seams | 99d08455a059770dabd2482dcf085d59a8d705bb | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `use Mix.Config` has been deprecated since Elixir 1.9 in
# favor of `import Config` — confirm the project's minimum Elixir version
# before switching.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :seams, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:seams, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
#     import_config "#{Mix.env}.exs"
ff6697c75f76860dcbdf46ff95d76ecfdaee13a7 | 4,081 | exs | Elixir | test/challenge_gov/submission_documents_test.exs | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | test/challenge_gov/submission_documents_test.exs | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | test/challenge_gov/submission_documents_test.exs | smartlogic/Challenge_gov | b4203d1fcfb742dd17ecfadb9e9c56ad836d4254 | [
"CC0-1.0"
] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z | defmodule ChallengeGov.SubmissionDocumentsTest do
use ChallengeGov.DataCase
alias ChallengeGov.Submissions
alias ChallengeGov.SubmissionDocuments
alias ChallengeGov.TestHelpers.AccountHelpers
alias ChallengeGov.TestHelpers.ChallengeHelpers
alias ChallengeGov.TestHelpers.SubmissionHelpers
alias ChallengeGov.TestHelpers.SubmissionDocumentHelpers
describe "uploading documents" do
  test "successfully" do
    user = AccountHelpers.create_user()

    {:ok, document} =
      SubmissionDocuments.upload(user, %{
        "file" => %{path: "test/fixtures/test.pdf"},
        "name" => "Test File Name"
      })

    # The stored document is owned by the uploader, keeps the original file
    # extension, receives a storage key, and keeps the user-provided name.
    assert document.user_id == user.id
    assert document.extension == ".pdf"
    assert document.key
    assert document.name === "Test File Name"
  end
end
describe "attaching to a submission" do
  test "successfully" do
    user = AccountHelpers.create_user()
    challenge = ChallengeHelpers.create_single_phase_challenge(user, %{user_id: user.id})
    submission = SubmissionHelpers.create_submitted_submission(%{}, user, challenge)

    document =
      SubmissionDocumentHelpers.upload_document(
        user,
        "test/fixtures/test.pdf",
        "Test File Name"
      )

    {:ok, document} = SubmissionDocuments.attach_to_submission(document, submission)

    assert document.submission_id == submission.id
    assert document.name === "Test File Name"
  end

  # A document already attached to one submission cannot be re-attached
  # to another.
  test "already assigned" do
    user = AccountHelpers.create_user()
    challenge = ChallengeHelpers.create_single_phase_challenge(user, %{user_id: user.id})
    submission_1 = SubmissionHelpers.create_submitted_submission(%{}, user, challenge)
    submission_2 = SubmissionHelpers.create_submitted_submission(%{}, user, challenge)
    document = SubmissionDocumentHelpers.upload_document(user, "test/fixtures/test.pdf")

    {:ok, document} = SubmissionDocuments.attach_to_submission(document, submission_1)
    {:error, _changeset} = SubmissionDocuments.attach_to_submission(document, submission_2)

    # No name was passed to upload_document/2 here, so the stored name is
    # blank (presumably the helper's default — see upload_document).
    assert document.name === ""
  end

  # Attaching someone else's document to a submission is rejected.
  test "attempting to assign another user's submission" do
    user = AccountHelpers.create_user()
    challenge = ChallengeHelpers.create_single_phase_challenge(user, %{user_id: user.id})
    user_1 = AccountHelpers.create_user(%{email: "user1@example.com"})
    submission_1 = SubmissionHelpers.create_submitted_submission(%{}, user_1, challenge)
    user_2 = AccountHelpers.create_user(%{email: "user2@example.com"})
    document = SubmissionDocumentHelpers.upload_document(user_2, "test/fixtures/test.pdf")

    {:error, _changeset} = SubmissionDocuments.attach_to_submission(document, submission_1)
  end
end
describe "deleting a document" do
  test "successfully" do
    user = AccountHelpers.create_user()
    document = SubmissionDocumentHelpers.upload_document(user, "test/fixtures/test.pdf")

    {:ok, _document} = SubmissionDocuments.delete(document)
  end
end
describe "preserving uploaded document(s) on form error" do
  # When submission creation fails validation, the already-uploaded
  # documents must survive in the changeset so the form can re-render them.
  test "successfully" do
    user = AccountHelpers.create_user()
    challenge = ChallengeHelpers.create_single_phase_challenge(user, %{user_id: user.id})
    phase = Enum.at(challenge.phases, 0)

    document =
      SubmissionDocumentHelpers.upload_document(
        user,
        "test/fixtures/test.pdf",
        "Test File Name"
      )

    # Missing required submission fields (e.g. title) make this fail.
    {:error, changeset} =
      Submissions.create_review(
        %{
          "action" => "review",
          "document_ids" => ["#{document.id}"],
          "documents" => [document],
          "submission" => %{
            "brief_description" => "brief description",
            "description" => "long description"
          }
        },
        user,
        challenge,
        phase
      )

    assert changeset.errors
    # The uploaded document ids/objects are carried through the changeset.
    assert changeset.changes[:document_ids] === ["#{document.id}"]
    assert hd(changeset.changes[:document_objects]).name === document.name
  end
end
| 34.294118 | 93 | 0.676795 |
ff669df13c506ac872de9195a591c7d9ba5bc24f | 1,375 | ex | Elixir | lib/discovery_api_web/plugs/response_cache.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | 1 | 2021-04-05T19:17:18.000Z | 2021-04-05T19:17:18.000Z | lib/discovery_api_web/plugs/response_cache.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | 11 | 2020-01-07T15:43:42.000Z | 2020-12-22T15:23:25.000Z | lib/discovery_api_web/plugs/response_cache.ex | folkengine/discovery_api | 0bb7a0aff4603a4f7532cc65ca8c8dbb37a57217 | [
"Apache-2.0"
] | null | null | null | defmodule DiscoveryApiWeb.Plugs.ResponseCache do
@moduledoc """
Plug that will cache responses for configured url patterns
"""
import Plug.Conn
require Logger
# Lets this module sit directly in a supervision tree: the backing store is
# a Cachex cache named after this module.
def child_spec([]) do
  Supervisor.child_spec({Cachex, __MODULE__}, id: __MODULE__)
end
# Plug callback: options pass through unchanged. Expects a map/struct with a
# :for_params list of cacheable param sets — see call/2.
def init(opts) do
  opts
end
# Plug callback: only requests whose params exactly match one of the
# configured :for_params entries participate in caching.
def call(conn, opts) do
  do_call(conn, conn.params in opts.for_params)
end
# Drops every cached response (e.g. after upstream data changes).
def invalidate() do
  Cachex.clear(__MODULE__)
  Logger.debug(fn -> "Cache cleared" end)
end
# Params matched a cacheable pattern: serve from cache when present,
# otherwise register a before_send hook that stores the response for later.
defp do_call(conn, true = _match) do
  case Cachex.get(__MODULE__, {conn.request_path, conn.params}) do
    {:ok, nil} ->
      # Cache miss: let the pipeline run and capture the response on send.
      conn
      |> register_hook()

    {:ok, response} ->
      Logger.debug(fn -> "Responding to #{conn.request_path} / #{inspect(conn.params)} with entry from cache" end)

      # NOTE(review): replays always use status 200 regardless of the status
      # the original response carried — see register_hook/1.
      conn
      |> merge_resp_headers(response.resp_headers)
      |> send_resp(200, response.resp_body)
      |> halt()
  end
end
# Params did not match any cacheable pattern: pass the conn through untouched.
defp do_call(conn, false = _match) do
  conn
end
# Registers a before_send hook that stores the outgoing response (its
# "content-*" headers and body) under {request_path, params}.
#
# Fix: only successful (200) responses are cached. Cache hits are replayed
# with a hard-coded 200 status (see do_call/2), so caching an error or
# not-found response would turn every subsequent request into a bogus 200
# until the cache is invalidated.
defp register_hook(conn) do
  register_before_send(conn, fn conn ->
    if conn.status == 200 do
      Cachex.put(__MODULE__, {conn.request_path, conn.params}, %{
        resp_headers: content_headers(conn),
        resp_body: conn.resp_body
      })
    end

    conn
  end)
end
# Selects only the entity ("content-*") response headers so a cached replay
# can restore them without copying unrelated headers.
defp content_headers(conn) do
  for {name, value} <- conn.resp_headers,
      String.starts_with?(name, "content-"),
      do: {name, value}
end
end
| 24.122807 | 129 | 0.658909 |
ff66c7551050b93bdb270fd39708b5f4dda6eae0 | 11,049 | ex | Elixir | lib/elixir/lib/map_set.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 243 | 2020-02-03T03:48:51.000Z | 2021-11-08T12:56:25.000Z | lib/elixir/lib/map_set.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | 6 | 2021-03-19T12:33:21.000Z | 2021-04-02T17:52:45.000Z | lib/elixir/lib/map_set.ex | hurrycaner/elixir | c54daf97764c222a3ade6c84ac89bacb68abb0fb | [
"Apache-2.0"
] | null | null | null | defmodule MapSet do
@moduledoc """
Functions that work on sets.
A set is a data structure that can contain unique elements of any kind,
without any particular order. `MapSet` is the "go to" set data structure in Elixir.
A set can be constructed using `MapSet.new/0`:
iex> MapSet.new()
#MapSet<[]>
Elements in a set don't have to be of the same type and they can be
populated from an [enumerable](`t:Enumerable.t/0`) using `MapSet.new/1`:
iex> MapSet.new([1, :two, {"three"}])
#MapSet<[1, :two, {"three"}]>
Elements can be inserted using `MapSet.put/2`:
iex> MapSet.new([2]) |> MapSet.put(4) |> MapSet.put(0)
#MapSet<[0, 2, 4]>
By definition, sets can't contain duplicate elements: when
inserting an element in a set where it's already present, the insertion is
simply a no-op.
iex> map_set = MapSet.new()
iex> MapSet.put(map_set, "foo")
#MapSet<["foo"]>
iex> map_set |> MapSet.put("foo") |> MapSet.put("foo")
#MapSet<["foo"]>
A `MapSet` is represented internally using the `%MapSet{}` struct. This struct
can be used whenever there's a need to pattern match on something being a `MapSet`:
iex> match?(%MapSet{}, MapSet.new())
true
Note that, however, the struct fields are private and must not be accessed
directly; use the functions in this module to perform operations on sets.
`MapSet`s can also be constructed starting from other collection-type data
structures: for example, see `MapSet.new/1` or `Enum.into/2`.
`MapSet` is built on top of `Map`, this means that they share many properties,
including logarithmic time complexity. See the documentation for `Map` for more
information on its execution time complexity.
"""
# MapSets have an underlying Map. MapSet elements are keys of said map,
# and this empty list is their associated dummy value.
@dummy_value []
@type value :: term
@opaque t(value) :: %__MODULE__{map: %{optional(value) => []}}
@type t :: t(term)
# TODO: Remove version key on Elixir v2.0
defstruct map: %{}, version: 2
@doc """
Returns a new set.

## Examples

    iex> MapSet.new()
    #MapSet<[]>

"""
@spec new :: t
def new, do: %__MODULE__{}
@doc """
Creates a set from an enumerable.

## Examples

    iex> MapSet.new([:b, :a, 3])
    #MapSet<[3, :a, :b]>

    iex> MapSet.new([3, 3, 3, 2, 2, 1])
    #MapSet<[1, 2, 3]>

"""
@spec new(Enum.t()) :: t
def new(enumerable)

# A MapSet is already a set: return it as-is instead of copying.
def new(%__MODULE__{} = map_set), do: map_set

def new(enumerable) do
  # Materialize the enumerable once, then build the backing map in a
  # single pass; duplicates collapse naturally on the map keys.
  %MapSet{map: enumerable |> Enum.to_list() |> new_from_list([])}
end
@doc """
Creates a set from an enumerable via the transformation function.

## Examples

    iex> MapSet.new([1, 2, 1], fn x -> 2 * x end)
    #MapSet<[2, 4]>

"""
@spec new(Enum.t(), (term -> val)) :: t(val) when val: value
def new(enumerable, transform) when is_function(transform, 1) do
  # Same single-pass construction as new/1, but each element is mapped
  # through `transform` before becoming a set member.
  %MapSet{map: enumerable |> Enum.to_list() |> new_from_list_transform(transform, [])}
end
# Tail-recursively pairs each element with the dummy value, then builds the
# backing map in one pass once the input list is exhausted.
defp new_from_list([], pairs), do: Map.new(pairs)

defp new_from_list([head | tail], pairs) do
  new_from_list(tail, [{head, @dummy_value} | pairs])
end
# Same accumulation strategy as new_from_list/2, except each key is produced
# by applying `fun` to the element first.
defp new_from_list_transform([], _fun, pairs), do: Map.new(pairs)

defp new_from_list_transform([head | tail], fun, pairs) do
  new_from_list_transform(tail, fun, [{fun.(head), @dummy_value} | pairs])
end
@doc """
Deletes `value` from `map_set`.

Returns a new set which is a copy of `map_set` but without `value`.

## Examples

    iex> map_set = MapSet.new([1, 2, 3])
    iex> MapSet.delete(map_set, 4)
    #MapSet<[1, 2, 3]>
    iex> MapSet.delete(map_set, 2)
    #MapSet<[1, 3]>

"""
@spec delete(t(val1), val2) :: t(val1) when val1: value, val2: value
def delete(%MapSet{map: inner} = set, element) do
  # Map.delete/2 is a no-op for absent keys, so no membership check needed.
  %{set | map: Map.delete(inner, element)}
end
@doc """
Returns a set that is `map_set1` without the members of `map_set2`.

## Examples

    iex> MapSet.difference(MapSet.new([1, 2]), MapSet.new([2, 3, 4]))
    #MapSet<[1]>

"""
@spec difference(t(val1), t(val2)) :: t(val1) when val1: value, val2: value
def difference(map_set1, map_set2)

# If the first set is less than twice the size of the second map, it is fastest
# to re-accumulate elements in the first set that are not present in the second set.
def difference(%MapSet{map: map1}, %MapSet{map: map2})
    when map_size(map1) < map_size(map2) * 2 do
  map =
    map1
    |> :maps.iterator()
    |> :maps.next()
    |> filter_not_in(map2, [])

  %MapSet{map: map}
end

# If the second set is less than half the size of the first set, it's fastest
# to simply iterate through each element in the second set, deleting them from
# the first set.
def difference(%MapSet{map: map1} = map_set, %MapSet{map: map2}) do
  %{map_set | map: Map.drop(map1, Map.keys(map2))}
end

# Walks map1's iterator and accumulates only the keys absent from map2,
# building the result map once at the end.
defp filter_not_in(:none, _map2, acc), do: Map.new(acc)

defp filter_not_in({key, _val, iter}, map2, acc) do
  if :erlang.is_map_key(key, map2) do
    filter_not_in(:maps.next(iter), map2, acc)
  else
    filter_not_in(:maps.next(iter), map2, [{key, @dummy_value} | acc])
  end
end
@doc """
Checks if `map_set1` and `map_set2` have no members in common.

## Examples

    iex> MapSet.disjoint?(MapSet.new([1, 2]), MapSet.new([3, 4]))
    true

    iex> MapSet.disjoint?(MapSet.new([1, 2]), MapSet.new([2, 3]))
    false

"""
@spec disjoint?(t, t) :: boolean
def disjoint?(%MapSet{map: map1}, %MapSet{map: map2}) do
  # order_by_size/2 (defined later in this module) presumably yields the
  # maps smaller-first, so we iterate the smaller and probe the larger.
  {map1, map2} = order_by_size(map1, map2)

  map1
  |> :maps.iterator()
  |> :maps.next()
  |> none_in?(map2)
end

# True when no key produced by the iterator exists in map2; `and` makes the
# walk short-circuit on the first common key.
defp none_in?(:none, _), do: true

defp none_in?({key, _val, iter}, map2) do
  not :erlang.is_map_key(key, map2) and none_in?(:maps.next(iter), map2)
end
@doc """
Checks if two sets are equal.

The comparison between elements is done using `===/2`,
which means a set with `1` is not equivalent to a set with
`1.0`.

## Examples

    iex> MapSet.equal?(MapSet.new([1, 2]), MapSet.new([2, 1, 1]))
    true

    iex> MapSet.equal?(MapSet.new([1, 2]), MapSet.new([3, 4]))
    false

    iex> MapSet.equal?(MapSet.new([1]), MapSet.new([1.0]))
    false

"""
@spec equal?(t, t) :: boolean
# Fast path: identical struct versions mean the backing maps can be
# compared directly with ===/2.
def equal?(%MapSet{map: map1, version: version}, %MapSet{map: map2, version: version}) do
  map1 === map2
end

# Elixir v1.5 changed the map representation, so on
# version mismatch we need to compare the keys directly.
def equal?(%MapSet{map: map1}, %MapSet{map: map2}) do
  map_size(map1) == map_size(map2) and all_in?(map1, map2)
end
@doc """
Returns a set containing only members that `map_set1` and `map_set2` have in common.

## Examples

    iex> MapSet.intersection(MapSet.new([1, 2]), MapSet.new([2, 3, 4]))
    #MapSet<[2]>

    iex> MapSet.intersection(MapSet.new([1, 2]), MapSet.new([3, 4]))
    #MapSet<[]>

"""
@spec intersection(t(val), t(val)) :: t(val) when val: value
def intersection(%MapSet{map: map1} = map_set, %MapSet{map: map2}) do
  # order_by_size/2 (defined later in this module) presumably yields the
  # maps smaller-first; taking the smaller map's keys from the larger map
  # minimizes the number of lookups.
  {map1, map2} = order_by_size(map1, map2)

  %{map_set | map: Map.take(map2, Map.keys(map1))}
end
@doc """
Checks if `map_set` contains `value`.
## Examples
iex> MapSet.member?(MapSet.new([1, 2, 3]), 2)
true
iex> MapSet.member?(MapSet.new([1, 2, 3]), 4)
false
"""
@spec member?(t, value) :: boolean
def member?(%MapSet{map: map}, value) do
:erlang.is_map_key(value, map)
end
@doc """
Inserts `value` into `map_set` if `map_set` doesn't already contain it.
## Examples
iex> MapSet.put(MapSet.new([1, 2, 3]), 3)
#MapSet<[1, 2, 3]>
iex> MapSet.put(MapSet.new([1, 2, 3]), 4)
#MapSet<[1, 2, 3, 4]>
"""
@spec put(t(val), new_val) :: t(val | new_val) when val: value, new_val: value
def put(%MapSet{map: map} = map_set, value) do
%{map_set | map: Map.put(map, value, @dummy_value)}
end
@doc """
Returns the number of elements in `map_set`.
## Examples
iex> MapSet.size(MapSet.new([1, 2, 3]))
3
"""
@spec size(t) :: non_neg_integer
def size(%MapSet{map: map}) do
map_size(map)
end
@doc """
Checks if `map_set1`'s members are all contained in `map_set2`.
This function checks if `map_set1` is a subset of `map_set2`.
## Examples
iex> MapSet.subset?(MapSet.new([1, 2]), MapSet.new([1, 2, 3]))
true
iex> MapSet.subset?(MapSet.new([1, 2, 3]), MapSet.new([1, 2]))
false
"""
@spec subset?(t, t) :: boolean
def subset?(%MapSet{map: map1}, %MapSet{map: map2}) do
map_size(map1) <= map_size(map2) and all_in?(map1, map2)
end
# True when every key of the first argument is present in `map2`. The first
# argument is either a raw map (bootstrap clause below) or a :maps iterator step.
defp all_in?(:none, _map2), do: true

defp all_in?({key, _val, iter}, map2) do
  :erlang.is_map_key(key, map2) and all_in?(:maps.next(iter), map2)
end

defp all_in?(map1, map2) when is_map(map1) and is_map(map2) do
  all_in?(:maps.next(:maps.iterator(map1)), map2)
end
@doc """
Converts `map_set` to a list.
## Examples
iex> MapSet.to_list(MapSet.new([1, 2, 3]))
[1, 2, 3]
"""
@spec to_list(t(val)) :: [val] when val: value
def to_list(%MapSet{map: map}) do
Map.keys(map)
end
@doc """
Returns a set containing all members of `map_set1` and `map_set2`.
## Examples
iex> MapSet.union(MapSet.new([1, 2]), MapSet.new([2, 3, 4]))
#MapSet<[1, 2, 3, 4]>
"""
@spec union(t(val1), t(val2)) :: t(val1 | val2) when val1: value, val2: value
def union(map_set1, map_set2)
def union(%MapSet{map: map1, version: version} = map_set, %MapSet{map: map2, version: version}) do
%{map_set | map: Map.merge(map1, map2)}
end
def union(%MapSet{map: map1}, %MapSet{map: map2}) do
map = new_from_list(Map.keys(map1) ++ Map.keys(map2), [])
%MapSet{map: map}
end
# Returns the pair ordered as {smaller, larger}; inlined for the hot paths above.
@compile {:inline, [order_by_size: 2]}
defp order_by_size(left, right) when map_size(left) > map_size(right), do: {right, left}
defp order_by_size(left, right), do: {left, right}
defimpl Enumerable do
  # Size is tracked by the underlying map, so counting is cheap.
  def count(map_set), do: {:ok, MapSet.size(map_set)}

  # Membership delegates straight to the set's own lookup.
  def member?(map_set, element), do: {:ok, MapSet.member?(map_set, element)}

  def slice(map_set) do
    size = MapSet.size(map_set)
    # The list is built lazily, only when the slicer fun is actually invoked.
    {:ok, size, &Enumerable.List.slice(MapSet.to_list(map_set), &1, &2, size)}
  end

  def reduce(map_set, acc, fun) do
    Enumerable.List.reduce(MapSet.to_list(map_set), acc, fun)
  end
end
defimpl Collectable do
  def into(map_set) do
    # Collect into a plain list of {element, dummy} pairs and merge once at :done.
    collector = fn
      entries, {:cont, element} -> [{element, []} | entries]
      entries, :done -> %{map_set | map: Map.merge(map_set.map, Map.new(entries))}
      _entries, :halt -> :ok
    end

    {[], collector}
  end
end
defimpl Inspect do
  import Inspect.Algebra

  def inspect(map_set, opts) do
    # Render charlists as plain lists so set members are unambiguous.
    opts = %Inspect.Opts{opts | charlists: :as_lists}
    entries_doc = Inspect.List.inspect(MapSet.to_list(map_set), opts)
    concat(["#MapSet<", entries_doc, ">"])
  end
end
end
| 25.997647 | 100 | 0.613721 |
ff66d42485548cf64250810ec2ff7d5b14d2b3f1 | 640 | exs | Elixir | test/grizzly/zwave/commands/schedule_entry_type_supported_get_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
] | 76 | 2019-09-04T16:56:58.000Z | 2022-03-29T06:54:36.000Z | test/grizzly/zwave/commands/schedule_entry_type_supported_get_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
] | 124 | 2019-09-05T14:01:24.000Z | 2022-02-28T22:58:14.000Z | test/grizzly/zwave/commands/schedule_entry_type_supported_get_test.exs | smartrent/grizzly | 65a397ea7bfedb5518fe63a3f058a0b6af473e39 | [
"Apache-2.0"
defmodule Grizzly.ZWave.Commands.ScheduleEntryTypeSupportedGetTest do
  use ExUnit.Case, async: true

  alias Grizzly.ZWave.Commands.ScheduleEntryTypeSupportedGet

  test "creates the command and validates params" do
    # The command carries no params, so construction should always succeed.
    {:ok, _command} = ScheduleEntryTypeSupportedGet.new()
  end

  test "encodes params correctly" do
    {:ok, command} = ScheduleEntryTypeSupportedGet.new()

    # A GET with no fields encodes to the empty binary payload.
    assert ScheduleEntryTypeSupportedGet.encode_params(command) == <<>>
  end

  test "decodes params correctly" do
    # Decoding the empty payload yields an empty param list.
    {:ok, []} = ScheduleEntryTypeSupportedGet.decode_params(<<>>)
  end
end
| 30.47619 | 82 | 0.760938 |
ff66e5673dc7996052fcda1c2f563670b3f276b5 | 6,251 | ex | Elixir | lib/absinthe/blueprint.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
] | 4,101 | 2016-03-02T03:49:20.000Z | 2022-03-31T05:46:01.000Z | lib/absinthe/blueprint.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
] | 889 | 2016-03-02T16:06:59.000Z | 2022-03-31T20:24:12.000Z | lib/absinthe/blueprint.ex | zoldar/absinthe | 72ff9f91fcc0a261f9965cf8120c7c72ff6e4c7c | [
"MIT"
defmodule Absinthe.Blueprint do
@moduledoc """
Represents the graphql document to be executed.
Please see the code itself for more information on individual blueprint sub
modules.
"""
alias __MODULE__
defstruct operations: [],
directives: [],
fragments: [],
name: nil,
schema_definitions: [],
schema: nil,
prototype_schema: nil,
adapter: nil,
initial_phases: [],
# Added by phases
telemetry: %{},
flags: %{},
errors: [],
input: nil,
source: nil,
execution: %Blueprint.Execution{},
result: %{}
@type t :: %__MODULE__{
operations: [Blueprint.Document.Operation.t()],
schema_definitions: [Blueprint.Schema.SchemaDefinition.t()],
directives: [Blueprint.Schema.DirectiveDefinition.t()],
name: nil | String.t(),
fragments: [Blueprint.Document.Fragment.Named.t()],
schema: nil | Absinthe.Schema.t(),
prototype_schema: nil | Absinthe.Schema.t(),
adapter: nil | Absinthe.Adapter.t(),
# Added by phases
telemetry: map,
errors: [Absinthe.Phase.Error.t()],
flags: flags_t,
input: nil | Absinthe.Language.Document.t(),
source: nil | String.t() | Absinthe.Language.Source.t(),
execution: Blueprint.Execution.t(),
result: result_t,
initial_phases: [Absinthe.Phase.t()]
}
@type result_t :: %{
optional(:data) => term,
optional(:errors) => [term],
optional(:extensions) => term
}
@type node_t ::
t()
| Blueprint.Directive.t()
| Blueprint.Document.t()
| Blueprint.Schema.t()
| Blueprint.Input.t()
| Blueprint.TypeReference.t()
@type use_t ::
Blueprint.Document.Fragment.Named.Use.t()
| Blueprint.Input.Variable.Use.t()
@type flags_t :: %{atom => module}
# Tree traversal lives in Absinthe.Blueprint.Transform; these delegates expose
# pre-order and post-order walks (with and without an accumulator).
defdelegate prewalk(blueprint, fun), to: Absinthe.Blueprint.Transform
defdelegate prewalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform
defdelegate postwalk(blueprint, fun), to: Absinthe.Blueprint.Transform
defdelegate postwalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform
@doc """
Returns the first node in the blueprint for which `fun` returns a truthy
value, or `nil` when no node matches.
"""
def find(blueprint, fun) do
  reducer = fn
    node, nil ->
      # Nothing found yet: test this node.
      if fun.(node), do: {node, node}, else: {node, nil}

    node, found ->
      # Already found; just thread the result through the rest of the walk.
      {node, found}
  end

  {_node, found} = Blueprint.prewalk(blueprint, nil, reducer)
  found
end
@doc false
# This is largely a debugging tool which replaces `schema_node` struct values
# with just the type identifier, rendering the blueprint tree much easier to read
def __compress__(blueprint) do
  prewalk(blueprint, fn node ->
    case node do
      %{schema_node: %{identifier: id}} -> %{node | schema_node: id}
      _ -> node
    end
  end)
end
@doc """
Returns the named fragment called `name`, or `nil` when absent.
"""
@spec fragment(t, String.t()) :: nil | Blueprint.Document.Fragment.Named.t()
def fragment(blueprint, name) do
  Enum.find(blueprint.fragments, fn fragment -> fragment.name == name end)
end
@doc """
Add a flag to a node.
"""
@spec put_flag(node_t, atom, module) :: node_t
def put_flag(node, flag, mod) do
update_in(node.flags, &Map.put(&1, flag, mod))
end
@doc """
Determine whether a flag has been set on a node.
"""
@spec flagged?(node_t, atom) :: boolean
def flagged?(node, flag) do
Map.has_key?(node.flags, flag)
end
@doc """
Get the currently selected operation.
"""
@spec current_operation(t) :: nil | Blueprint.Document.Operation.t()
def current_operation(blueprint) do
Enum.find(blueprint.operations, &(&1.current == true))
end
@doc """
Update the current operation.
"""
@spec update_current(t, (Blueprint.Document.Operation.t() -> Blueprint.Document.Operation.t())) ::
t
def update_current(blueprint, change) do
ops =
Enum.map(blueprint.operations, fn
%{current: true} = op ->
change.(op)
other ->
other
end)
%{blueprint | operations: ops}
end
@doc """
Append the given field or fields to the given type
"""
def extend_fields(blueprint = %Blueprint{}, ext_blueprint = %Blueprint{}) do
ext_types = types_by_name(ext_blueprint)
schema_defs =
for schema_def = %{type_definitions: type_defs} <- blueprint.schema_definitions do
type_defs =
for type_def <- type_defs do
case ext_types[type_def.name] do
nil ->
type_def
%{fields: new_fields} ->
%{type_def | fields: type_def.fields ++ new_fields}
end
end
%{schema_def | type_definitions: type_defs}
end
%{blueprint | schema_definitions: schema_defs}
end
def extend_fields(blueprint, ext_blueprint) when is_atom(ext_blueprint) do
extend_fields(blueprint, ext_blueprint.__absinthe_blueprint__())
end
@doc """
Appends `new_field` (a single field or a list of fields) to the type
definition named `type_def_name` in every schema definition.
"""
def add_field(blueprint = %Blueprint{}, type_def_name, new_field) do
  updated_schema_defs =
    for schema_def = %{type_definitions: type_defs} <- blueprint.schema_definitions do
      updated_type_defs =
        for type_def <- type_defs do
          if type_def.name == type_def_name,
            do: %{type_def | fields: type_def.fields ++ List.wrap(new_field)},
            else: type_def
        end

      %{schema_def | type_definitions: updated_type_defs}
    end

  %{blueprint | schema_definitions: updated_schema_defs}
end
@doc """
Returns the field with the given `name` from a node's `:fields` list, or `nil`.
"""
def find_field(%{fields: fields}, name) do
  Enum.find(fields, fn %{name: candidate} -> candidate == name end)
end
@doc """
Index the types by their name
"""
def types_by_name(blueprint = %Blueprint{}) do
for %{type_definitions: type_defs} <- blueprint.schema_definitions,
type_def <- type_defs,
into: %{} do
{type_def.name, type_def}
end
end
def types_by_name(module) when is_atom(module) do
types_by_name(module.__absinthe_blueprint__())
end
# Blueprints inspect as SDL via the schema-notation renderer.
defimpl Inspect do
  defdelegate inspect(term, options),
    to: Absinthe.Schema.Notation.SDL.Render
end
end
| 28.03139 | 100 | 0.600704 |
ff674051151f90060b0a14b4693814edf1fa0caa | 1,217 | ex | Elixir | tags/web/views/error_helpers.ex | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
] | null | null | null | tags/web/views/error_helpers.ex | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
] | null | null | null | tags/web/views/error_helpers.ex | merxer/kata | 5dbbca8b4173029f9311398148de9437a329cf9a | [
"MIT"
defmodule Tags.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  use Phoenix.HTML

  @doc """
  Generates tag for inlined form input errors.
  """
  def error_tag(form, field) do
    # `form.errors` entries are `{message, opts}` tuples; absent means no error.
    case form.errors[field] do
      nil -> nil
      error -> content_tag(:span, translate_error(error), class: "help-block")
    end
  end

  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # Error messages are defined within Ecto, so we call Gettext with our own
    # backend and the "errors" domain (translations live in errors.po).
    # Ecto passes the :count option when the message must be pluralized; in
    # your own code this boils down to either:
    #
    #     dngettext "errors", "1 file", "%{count} files", count
    #     dgettext "errors", "is invalid"
    #
    case opts[:count] do
      nil -> Gettext.dgettext(Tags.Gettext, "errors", msg, opts)
      count -> Gettext.dngettext(Tags.Gettext, "errors", msg, msg, count, opts)
    end
  end
end
| 29.682927 | 70 | 0.664749 |
ff675329b26cb799dd9018dfe011d27f3f3a3517 | 777 | exs | Elixir | test/estated/property/valuation_test.exs | jdav-dev/estated | a8476b803eff425b5b73517e7ea180bb7f8cc30b | [
"Apache-2.0"
] | null | null | null | test/estated/property/valuation_test.exs | jdav-dev/estated | a8476b803eff425b5b73517e7ea180bb7f8cc30b | [
"Apache-2.0"
] | null | null | null | test/estated/property/valuation_test.exs | jdav-dev/estated | a8476b803eff425b5b73517e7ea180bb7f8cc30b | [
"Apache-2.0"
defmodule Estated.Property.ValuationTest do
  use ExUnit.Case, async: true

  alias Estated.Property.Valuation

  doctest Valuation

  describe "cast/1" do
    @tag :unit
    test "casts a map as a Valuation" do
      # Unknown keys ("ignore me") must be silently dropped.
      input = %{
        "value" => 16_430,
        "high" => 17_220,
        "low" => 15_780,
        "forecast_standard_deviation" => 55,
        "date" => "2019-10-24",
        "ignore me" => nil
      }

      expected = %Valuation{
        value: 16_430,
        high: 17_220,
        low: 15_780,
        forecast_standard_deviation: 55,
        date: ~D[2019-10-24]
      }

      assert Valuation.cast(input) == expected
    end

    @tag :unit
    test "casts nil as nil" do
      assert Valuation.cast(nil) == nil
    end
  end
end
| 22.2 | 47 | 0.537967 |
ff6755104985986f887e979208ffa7aa8a80ad51 | 4,863 | ex | Elixir | clients/testing/lib/google_api/testing/v1/model/test_matrix.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/test_matrix.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
] | null | null | null | clients/testing/lib/google_api/testing/v1/model/test_matrix.ex | MechimCook/elixir-google-api | 0240ede69ec77115076724d223f9d1f849ff1d6b | [
"Apache-2.0"
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Testing.V1.Model.TestMatrix do
  @moduledoc """
  A group of one or more TestExecutions, built by taking a product of values over a pre-defined set of axes.

  ## Attributes

  - clientInfo (ClientInfo): Information about the client which invoked the test. Defaults to: `null`.
  - environmentMatrix (EnvironmentMatrix): Required. How the host machine(s) are configured. Defaults to: `null`.
  - flakyTestAttempts (integer()): The number of times a TestExecution should be re-attempted if one or more of its test cases fail for any reason. The maximum number of reruns allowed is 10. Default is 0, which implies no reruns. Defaults to: `null`.
  - invalidMatrixDetails (String.t): Output only. Describes why the matrix is considered invalid. Only useful for matrices in the INVALID state. Defaults to: `null`.
    - Enum - one of [INVALID_MATRIX_DETAILS_UNSPECIFIED, DETAILS_UNAVAILABLE, MALFORMED_APK, MALFORMED_TEST_APK, NO_MANIFEST, NO_PACKAGE_NAME, INVALID_PACKAGE_NAME, TEST_SAME_AS_APP, NO_INSTRUMENTATION, NO_SIGNATURE, INSTRUMENTATION_ORCHESTRATOR_INCOMPATIBLE, NO_TEST_RUNNER_CLASS, NO_LAUNCHER_ACTIVITY, FORBIDDEN_PERMISSIONS, INVALID_ROBO_DIRECTIVES, INVALID_RESOURCE_NAME, INVALID_DIRECTIVE_ACTION, TEST_LOOP_INTENT_FILTER_NOT_FOUND, SCENARIO_LABEL_NOT_DECLARED, SCENARIO_LABEL_MALFORMED, SCENARIO_NOT_DECLARED, DEVICE_ADMIN_RECEIVER, MALFORMED_XC_TEST_ZIP, BUILT_FOR_IOS_SIMULATOR, NO_TESTS_IN_XC_TEST_ZIP, USE_DESTINATION_ARTIFACTS, TEST_NOT_APP_HOSTED, PLIST_CANNOT_BE_PARSED, TEST_ONLY_APK, MALFORMED_IPA, NO_CODE_APK, INVALID_INPUT_APK, INVALID_APK_PREVIEW_SDK]
  - projectId (String.t): The cloud project that owns the test matrix. Defaults to: `null`.
  - resultStorage (ResultStorage): Required. Where the results for the matrix are written. Defaults to: `null`.
  - state (String.t): Output only. Indicates the current progress of the test matrix (e.g., FINISHED). Defaults to: `null`.
    - Enum - one of [TEST_STATE_UNSPECIFIED, VALIDATING, PENDING, RUNNING, FINISHED, ERROR, UNSUPPORTED_ENVIRONMENT, INCOMPATIBLE_ENVIRONMENT, INCOMPATIBLE_ARCHITECTURE, CANCELLED, INVALID]
  - testExecutions ([TestExecution]): Output only. The list of test executions that the service creates for this matrix. Defaults to: `null`.
  - testMatrixId (String.t): Output only. Unique id set by the service. Defaults to: `null`.
  - testSpecification (TestSpecification): Required. How to run the test. Defaults to: `null`.
  - timestamp (DateTime.t): Output only. The time this test matrix was initially created. Defaults to: `null`.
  """

  # NOTE: auto-generated by swagger-codegen (see the header comment above);
  # change the generator rather than editing this module by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :clientInfo => GoogleApi.Testing.V1.Model.ClientInfo.t(),
          :environmentMatrix => GoogleApi.Testing.V1.Model.EnvironmentMatrix.t(),
          :flakyTestAttempts => any(),
          :invalidMatrixDetails => any(),
          :projectId => any(),
          :resultStorage => GoogleApi.Testing.V1.Model.ResultStorage.t(),
          :state => any(),
          :testExecutions => list(GoogleApi.Testing.V1.Model.TestExecution.t()),
          :testMatrixId => any(),
          :testSpecification => GoogleApi.Testing.V1.Model.TestSpecification.t(),
          :timestamp => DateTime.t()
        }

  # JSON field <-> struct key mappings handled by GoogleApi.Gax.ModelBase.
  field(:clientInfo, as: GoogleApi.Testing.V1.Model.ClientInfo)
  field(:environmentMatrix, as: GoogleApi.Testing.V1.Model.EnvironmentMatrix)
  field(:flakyTestAttempts)
  field(:invalidMatrixDetails)
  field(:projectId)
  field(:resultStorage, as: GoogleApi.Testing.V1.Model.ResultStorage)
  field(:state)
  field(:testExecutions, as: GoogleApi.Testing.V1.Model.TestExecution, type: :list)
  field(:testMatrixId)
  field(:testSpecification, as: GoogleApi.Testing.V1.Model.TestSpecification)
  field(:timestamp, as: DateTime)
end
# Decoding delegates to the model's generated decode/2 (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.Testing.V1.Model.TestMatrix do
  def decode(value, options) do
    GoogleApi.Testing.V1.Model.TestMatrix.decode(value, options)
  end
end
# Encoding uses the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.Testing.V1.Model.TestMatrix do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 60.7875 | 768 | 0.761875 |
ff676ad6d4359ea194c51c36cf655096eb3996cd | 3,190 | ex | Elixir | lib/dynamic_page_handler.ex | EfeElixir/cowboy-elixir-example | 36f728d2e6d223b9cb0981cabcca6aefb6859cf1 | [
"MIT"
] | null | null | null | lib/dynamic_page_handler.ex | EfeElixir/cowboy-elixir-example | 36f728d2e6d223b9cb0981cabcca6aefb6859cf1 | [
"MIT"
] | null | null | null | lib/dynamic_page_handler.ex | EfeElixir/cowboy-elixir-example | 36f728d2e6d223b9cb0981cabcca6aefb6859cf1 | [
"MIT"
] | null | null | null |
defmodule DynamicPageHandler do
  @moduledoc """
  A cowboy handler for serving a single dynamic webpage. No templates are used; the
  HTML is all generated within the handler.
  """

  @doc """
  Initialize a plain HTTP handler. See the documentation here:
  http://ninenines.eu/docs/en/cowboy/1.0/manual/cowboy_http_handler/

  All cowboy HTTP handlers require an init() function, which identifies which
  type of handler this is and returns an initial state (if the handler
  maintains state). In a plain http handler, you just return a
  3-tuple with :ok. We don't need to track a state in this handler, so
  we're returning the atom :no_state.
  """
  def init(_type, req, []) do
    {:ok, req, :no_state}
  end

  @doc """
  Handle a single HTTP request.

  In a cowboy handler, the handle/2 function does the work. It should return
  a 3-tuple with :ok, a request object (containing the reply), and the current
  state.
  """
  def handle(request, state) do
    # Construct a reply, using the cowboy_req:reply/4 function.
    #
    # reply/4 takes four arguments:
    # * The HTTP response status (200, 404, etc.)
    # * A list of 2-tuples representing headers
    # * The body of the response
    # * The original request
    {:ok, reply} =
      :cowboy_req.reply(
        # status code
        200,
        # headers
        [{"content-type", "text/html"}],
        # body of reply
        build_body(request),
        # original request
        request
      )

    # handle/2 returns a tuple containing :ok, the reply, and the
    # current state of the handler.
    {:ok, reply, state}
  end

  @doc """
  Do any cleanup necessary for the termination of this handler.

  Usually you don't do much with this. If things are breaking,
  try uncommenting the output lines here to get some more info on what's happening.
  """
  def terminate(_reason, _request, _state) do
    # IO.puts("Terminating for reason: #{inspect(reason)}")
    # IO.puts("Terminating after request: #{inspect(request)}")
    # IO.puts("Terminating with state: #{inspect(state)}")
    :ok
  end

  @doc """
  Assemble the body of a response in HTML.
  """
  def build_body(request) do
    # :os.timestamp/0 replaces the deprecated :erlang.now/0; it returns the
    # same {megaseconds, seconds, microseconds} tuple without the global lock.
    """
    <html>
    <head>
    <title>Elixir Cowboy Dynamic Example</title>
    <link rel='stylesheet' href='/static/css/styles.css' type='text/css' />
    </head>
    <body>
    <div id='main'>
    <h1>Dynamic Page Example</h1>
    <p>This page is rendered via the route: <code>{"/dynamic", DynamicPageHandler, []}</code>
    <br/>
    and the code for the handler can be found in <code>lib/dynamic_page_handler.ex</code>.</p>
    <h2>Current Time (:os.timestamp)</h2>
    <p><span class='time'> #{inspect(:os.timestamp())}</span></p>
    <p>Reload this page to see the time change.</p>
    <h2>Your Request Headers</h2>
    <dl>#{dl_headers(request)}</dl>
    </div>
    </body>
    </html>
    """
  end

  @doc """
  Build the contents of a <dl> containing all the request headers.
  """
  def dl_headers(request) do
    {headers, _req2} = :cowboy_req.headers(request)
    Enum.map(headers, fn {name, value} -> "<dt>#{name}</dt><dd>#{value}</dd>" end)
  end
end
| 29.266055 | 98 | 0.637304 |
ff678ffe25688c22769684b4657ea9cd369172ff | 2,001 | ex | Elixir | web/controllers/tipopagamento_controller.ex | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | [
"MIT"
] | null | null | null | web/controllers/tipopagamento_controller.ex | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | [
"MIT"
] | null | null | null | web/controllers/tipopagamento_controller.ex | alissonfpmorais/tucano | d22480fc416d14b44862be2ed89040d92b7c08d1 | [
"MIT"
defmodule Tucano.TipoPagamentoController do
  use Tucano.Web, :controller

  # Every action requires an authenticated user.
  plug Tucano.Plugs.RequireAuth

  alias Tucano.TipoPagamento

  # Lists all payment types sorted by their `tipo` attribute.
  def index(conn, _params) do
    tipos_pagamento = Enum.sort(TipoPagamento.get_all(), &(&1.tipo <= &2.tipo))
    render(conn, "index.html", tipos_pagamento: tipos_pagamento)
  end

  # NOTE(review): this action renders nothing; presumably a placeholder.
  def show(conn, _params) do
    conn
  end

  def new(conn, _params) do
    render(conn, "new.html", changeset: TipoPagamento.from_blank())
  end

  def create(conn, %{"tipo_pagamento" => tipo_pagamento}) do
    case TipoPagamento.insert(tipo_pagamento) do
      {:ok, _tipo_pagamento} ->
        conn
        |> put_flash(:info, "Criação realizada com sucesso!")
        |> redirect(to: tipo_pagamento_path(conn, :index))

      {:error, changeset} ->
        conn
        |> put_flash(:error, "Erro no processo de criação!")
        |> render("new.html", changeset: changeset)
    end
  end

  def edit(conn, %{"id" => tipo_pagamento_id}) do
    tipo_pagamento = TipoPagamento.get_by_id(tipo_pagamento_id)
    render(conn, "edit.html",
      changeset: TipoPagamento.changeset(tipo_pagamento),
      tipo_pagamento: tipo_pagamento
    )
  end

  def update(conn, %{"id" => tipo_pagamento_id, "tipo_pagamento" => tipo_pagamento}) do
    case TipoPagamento.update(tipo_pagamento_id, tipo_pagamento) do
      {:ok, _tipo_pagamento} ->
        conn
        |> put_flash(:info, "Atualização realizada com sucesso!")
        |> redirect(to: tipo_pagamento_path(conn, :index))

      {:error, changeset} ->
        conn
        |> put_flash(:error, "Erro no processo de atualização!")
        |> render("edit.html",
          changeset: changeset,
          tipo_pagamento: TipoPagamento.get_by_id(tipo_pagamento_id)
        )
    end
  end

  def delete(conn, %{"id" => tipo_pagamento_id}) do
    TipoPagamento.delete!(tipo_pagamento_id)

    conn
    |> put_flash(:info, "Remoção realizada com sucesso!")
    |> redirect(to: tipo_pagamento_path(conn, :index))
  end
end
| 30.318182 | 112 | 0.674663 |
ff679e0f5e17d534751902b8f23b2c638d57f5df | 2,624 | ex | Elixir | chatting_hangmans/lib/chatting_hangmans/hangman_server.ex | lukpra/Chatting-Hangmans | db1b8ff95ed9e98ad0682960ad0deb81c7b5024d | [
"MIT"
] | null | null | null | chatting_hangmans/lib/chatting_hangmans/hangman_server.ex | lukpra/Chatting-Hangmans | db1b8ff95ed9e98ad0682960ad0deb81c7b5024d | [
"MIT"
] | null | null | null | chatting_hangmans/lib/chatting_hangmans/hangman_server.ex | lukpra/Chatting-Hangmans | db1b8ff95ed9e98ad0682960ad0deb81c7b5024d | [
"MIT"
defmodule ChattingHangmans.HangmanServer do
  @moduledoc """
  GenServer that keeps one running hangman game per player plus a queue of
  secret phrases waiting to be played.
  """

  alias ChattingHangmans.Hangman
  alias ChattingHangmans.Game

  @name :hangman_server

  use GenServer

  defmodule State do
    # games:       player_name => %Game{}
    # games_queue: player_name => [secret_phrase]
    defstruct games: %{}, games_queue: %{}
  end

  # Client Interface

  def start_link(_arg) do
    IO.puts("Starting Hangman Game Server ...")
    GenServer.start_link(__MODULE__, %State{}, name: @name)
  end

  def create_new_game(player_name, secret_phrase) do
    GenServer.call(@name, {:create_new_game, player_name, secret_phrase})
  end

  def guess_a_letter(player_name, secret_letter) do
    GenServer.call(@name, {:guess_a_letter, player_name, secret_letter})
  end

  def current_games_for_user(player_name) do
    GenServer.call(@name, {:current_games_for_user, player_name})
  end

  def current_games do
    GenServer.call(@name, :current_games)
  end

  def clear do
    GenServer.cast(@name, :clear)
  end

  # Server Callbacks

  @impl true
  def init(state) do
    {:ok, state}
  end

  @impl true
  def handle_cast(:clear, state) do
    {:noreply, %{state | games: %{}}}
  end

  @impl true
  def handle_call({:current_games_for_user, player_name}, _from, state) do
    {:reply, Map.get(state.games, player_name), state}
  end

  def handle_call(:current_games, _from, state) do
    {:reply, state.games, state}
  end

  def handle_call({:create_new_game, player_name, secret_phrase}, _from, state) do
    # FIX: `new_state` was previously bound inside the if/else branches and
    # read afterwards; bindings do not escape `if` in Elixir, so bind the
    # result of the whole expression instead.
    new_state =
      if Map.get(state.games, player_name) == nil do
        new_game = %Game{players: player_name, secret_phrase: secret_phrase}
        %{state | games: Map.put(state.games, player_name, new_game)}
      else
        # FIX: pending phrases live in `games_queue`, not `games` — the old
        # code read the %Game{} struct and prepended a phrase onto it.
        phrase_list = Map.get(state.games_queue, player_name, [])
        updated_queue = Map.put(state.games_queue, player_name, [secret_phrase | phrase_list])
        %{state | games_queue: updated_queue}
      end

    {:reply, :created, new_state}
  end

  def handle_call({:guess_a_letter, player_name, letter}, _from, state) do
    current_game = Map.get(state.games, player_name)
    advanced_game = Hangman.play(%Game{current_game | current_letter: letter})
    updated_games = Map.put(state.games, player_name, advanced_game)
    {:reply, advanced_game, %{state | games: updated_games}}
  end
end
# alias ChattingHangmans.HangmanServer
# {:ok, pid} = HangmanServer.start_link("anything")
# IO.inspect(HangmanServer.create_new_game("larry", "bazinga"))
# IO.inspect(HangmanServer.guess_a_letter("larry", "a"))
| 28.215054 | 82 | 0.719131 |
ff67a987ccf8c3930a1b4298fa1187c70bc692ba | 630 | exs | Elixir | test/five_hundred/bid_test.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
] | 2 | 2021-06-28T07:06:13.000Z | 2021-07-18T01:13:27.000Z | test/five_hundred/bid_test.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
] | 1 | 2021-06-20T07:49:56.000Z | 2021-06-20T07:49:56.000Z | test/five_hundred/bid_test.exs | hugomd/five_hundred | 74d5b251d4da05ff650d786d5627965114f26447 | [
"MIT"
defmodule FiveHundred.BidTest do
  use ExUnit.Case

  alias FiveHundred.Bid

  describe "bid generation" do
    test "generates correct number of bids" do
      assert length(Bid.bids()) == 28
    end
  end

  describe "bid comparison" do
    test "a < b" do
      assert Bid.compare(%Bid{points: 0}, %Bid{points: 1}) == :lt
    end

    test "a > b" do
      assert Bid.compare(%Bid{points: 1}, %Bid{points: 0}) == :gt
    end

    test "a == b" do
      assert Bid.compare(%Bid{points: 1}, %Bid{points: 1}) == :eq
    end
  end
end
| 19.090909 | 46 | 0.547619 |
ff67ad70d02fedd6af14f94137796274f7cbd81d | 349 | exs | Elixir | apps/blog_api/priv/repo/migrations/20190809200442_tokens.exs | yashin5/blog_api | 5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e | [
"MIT"
] | null | null | null | apps/blog_api/priv/repo/migrations/20190809200442_tokens.exs | yashin5/blog_api | 5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e | [
"MIT"
] | null | null | null | apps/blog_api/priv/repo/migrations/20190809200442_tokens.exs | yashin5/blog_api | 5dd6d0c9e43ca9c1dc555afd73713b62b4efa34e | [
"MIT"
defmodule BlogApi.Repo.Migrations.Tokens do
  use Ecto.Migration

  def change do
    # Tokens use a UUID primary key instead of the default serial id.
    create table(:tokens, primary_key: false) do
      add :id, :uuid, primary_key: true
      add :token, :string
      add :user_id, references(:users, type: :uuid), null: false

      timestamps()
    end

    # A token value may appear at most once.
    create unique_index(:tokens, [:token])
  end
end
| 20.529412 | 65 | 0.653295 |
ff67e4dd013ad1d22d85b85f5614e62a9aba72e6 | 3,377 | ex | Elixir | apps/omg_jsonrpc/lib/expose_spec/rpc_translate.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_jsonrpc/lib/expose_spec/rpc_translate.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_jsonrpc/lib/expose_spec/rpc_translate.ex | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
# Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.JSONRPC.ExposeSpec.RPCTranslate do
  @moduledoc """
  Translate an incoming call to a form that can be executed with :erlang.apply/3

  The incoming call can originate from the JSONRPC handler or the Websockets
  handler (or other). Returns JSONRPC2-like error values if there is a problem.
  """

  @type function_name :: binary
  @type arg_name :: binary
  @type spec :: OMG.JSONRPC.ExposeSpec.spec()
  @type json_args :: %{required(arg_name) => any}
  @type rpc_error :: {:method_not_found, map} | {:invalid_params, map}

  @doc """
  Transforms a JSONRPC2 method and params into an Elixir Function-and-Arguments
  pair (the name echoes the `mfa()` convention from Elixir's typespecs).

  `on_match` may decode/validate each looked-up value; it is called per
  argument and must return `{:ok, value}` or `{:error, reason}` (or `nil` to
  signal a missing argument).
  """
  @spec to_fa(method :: function_name, params :: json_args, spec :: spec, on_match :: fun()) ::
          {:ok, atom, list(any)} | rpc_error
  def to_fa(method, params, spec, on_match \\ &on_match_default/3) do
    with {:ok, fname} <- existing_atom(method),
         :ok <- is_exposed(fname, spec),
         {:ok, args} <- get_args(fname, params, spec, on_match),
         do: {:ok, fname, args}
  end

  # Default per-argument hook: accept the looked-up value unchanged.
  defp on_match_default(_name, _type, value), do: {:ok, value}

  # Converts only to atoms that already exist, so untrusted method names
  # cannot exhaust the atom table.
  @spec existing_atom(method :: function_name) :: {:ok, atom} | {:method_not_found, map}
  defp existing_atom(method) do
    {:ok, String.to_existing_atom(method)}
  rescue
    ArgumentError -> {:method_not_found, %{method: method}}
  end

  @spec is_exposed(fname :: atom, spec :: spec) :: :ok | {:method_not_found, map}
  defp is_exposed(fname, spec) do
    # Map.has_key?/2 is O(1); the old `fname in Map.keys(spec)` scanned a list.
    if Map.has_key?(spec, fname) do
      :ok
    else
      {:method_not_found, %{method: fname}}
    end
  end

  @spec get_args(fname :: atom, params :: json_args, spec :: spec, on_match :: fun()) ::
          {:ok, list(any)} | {:invalid_params, map}
  defp get_args(fname, params, spec, on_match) when is_map(params) do
    validate_args = fn {name, type} = argspec, acc ->
      raw_value = Map.get(params, Atom.to_string(name))

      # The hook returns {:ok, decoded} / {:error, _}; a bare nil from a
      # custom hook marks the argument as missing.
      # NOTE(review): with the default hook, a missing param yields {:ok, nil}
      # and therefore passes through as nil — confirm this is intended.
      case on_match.(name, type, raw_value) do
        {:error, _} -> {:halt, {:missing_arg, argspec}}
        nil -> {:halt, {:missing_arg, argspec}}
        # Prepend instead of `acc ++ [value]` to keep accumulation O(n) total.
        {:ok, value} -> {:cont, [value | acc]}
      end
    end

    case Enum.reduce_while(spec[fname].args, [], validate_args) do
      {:missing_arg, {name, type}} ->
        msg = "Please provide parameter `#{name}` of type `#{inspect(type)}`"
        {:invalid_params, %{msg: msg, name: name, type: type}}

      args ->
        # Restore declaration order after the prepend-accumulation above.
        {:ok, Enum.reverse(args)}
    end
  end

  defp get_args(_, _, _, _) do
    {:invalid_params, %{msg: "params should be a JSON key-value pair array"}}
  end
end
| 35.177083 | 111 | 0.654723 |
ff67eb979fd81558a112838ae5b61828f4dad0b9 | 507 | ex | Elixir | lib/protocols/values.ex | skovsgaard/exleveldb | b7fa20b127fe712a50581d88a86339dc37684038 | [
"Apache-2.0"
] | 38 | 2015-01-01T04:12:25.000Z | 2021-06-04T10:19:24.000Z | lib/protocols/values.ex | skovsgaard/exleveldb | b7fa20b127fe712a50581d88a86339dc37684038 | [
"Apache-2.0"
] | 12 | 2015-03-31T23:19:05.000Z | 2016-10-24T14:33:57.000Z | lib/protocols/values.ex | skovsgaard/exleveldb | b7fa20b127fe712a50581d88a86339dc37684038 | [
"Apache-2.0"
defprotocol Exleveldb.Values do
  @doc "Coerces an integer, atom, charlist, or binary into its string representation."
  def to_value(non_string)
end

defimpl Exleveldb.Values, for: Integer do
  # Render the integer in base 10.
  def to_value(int) do
    Integer.to_string(int)
  end
end

defimpl Exleveldb.Values, for: Atom do
  # Use the atom's literal name as the value.
  def to_value(name) do
    Atom.to_string(name)
  end
end

defimpl Exleveldb.Values, for: List do
  # Charlists (lists of codepoints / iodata) become UTF-8 binaries.
  def to_value(chars) do
    List.to_string(chars)
  end
end

defimpl Exleveldb.Values, for: BitString do
  # Binaries are already in the target representation; pass through unchanged.
  def to_value(bin) do
    bin
  end
end
| 24.142857 | 68 | 0.767258 |
ff6816bb5099c1dae3aad7873684f4a5d7e9de0a | 2,765 | ex | Elixir | apps/shipping/lib/shipping/cargoes/cargoes.ex | pcmarks/ddd_elixir_demo_stage1 | e496f95cfaca1f9aca0e65a660eb8b999450f2f3 | [
"MIT"
] | 44 | 2017-11-12T17:12:55.000Z | 2022-03-29T18:21:08.000Z | apps/shipping/lib/shipping/cargoes/cargoes.ex | pcmarks/ddd_elixir_demo_stage1 | e496f95cfaca1f9aca0e65a660eb8b999450f2f3 | [
"MIT"
] | 7 | 2017-09-11T12:17:36.000Z | 2017-09-25T13:15:21.000Z | apps/shipping/lib/shipping/cargoes/cargoes.ex | pcmarks/ddd_elixir_demo_stage1 | e496f95cfaca1f9aca0e65a660eb8b999450f2f3 | [
"MIT"
] | 4 | 2019-04-15T08:03:30.000Z | 2021-12-15T16:00:02.000Z | defmodule Shipping.Cargoes do
  @moduledoc """
  The Cargoes Aggregate*. Its root is the module Shipping.Cargo
  Cargoes is also responsible for creating a Delivery History from the Handling Events
  From the DDD book: [An AGGREGATE is] a cluster of associated objects that
  are treated as a unit for the purpose of data changes. External references are
  restricted to one member of the AGGREGATE, designated as the root.
  """
  import Ecto.Query, warn: false
  alias Shipping
  alias Shipping.Repo
  alias Shipping.Cargoes.{Cargo, DeliveryHistory}
  alias Shipping.HandlingEvents.HandlingEvent
  @doc """
  Gets a cargo by its tracking id.
  Raises `Ecto.NoResultsError` if the Cargo does not exist.
  ## Examples
      iex> get_cargo_by_tracking_id!(123)
      %Cargo{}
      iex> get_cargo_by_tracking_id!(456)
      ** (Ecto.NoResultsError)
  """
  # NOTE(review): `Repo.get_by_tracking_id!/2` is not a standard Ecto.Repo
  # callback — presumably a custom helper on Shipping.Repo; confirm it raises
  # Ecto.NoResultsError as documented above.
  def get_cargo_by_tracking_id!(tracking_id), do: Repo.get_by_tracking_id!(Cargo, tracking_id)
  @doc """
  Updates a cargo.
  ## Examples
      iex> update_cargo(cargo, %{field: new_value})
      {:ok, %Cargo{}}
      iex> update_cargo(cargo, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_cargo(%Cargo{} = cargo, attrs) do
    cargo
    |> Cargo.changeset(attrs)
    |> Repo.update()
  end
  @doc """
  Returns an `%Ecto.Changeset{}` for tracking cargo changes.
  ## Examples
      iex> change_cargo(cargo)
      %Ecto.Changeset{source: %Cargo{}}
  """
  def change_cargo(%Cargo{} = cargo) do
    Cargo.changeset(cargo, %{})
  end
  @doc """
  Create a DeliveryHistory: a value object of the Cargo's current status. The values
  in a DeliveryHistory are determined by applying each of the Cargo's handling events,
  in turn, against the DeliveryHistory. Note the Handling Events must be chronological
  order, using the completion_time, with the oldest first.
  """
  def create_delivery_history(handling_events) do
    # Start from an empty history and fold each event into it, oldest first.
    delivery_history = %DeliveryHistory{}
    update_delivery_history(handling_events, delivery_history)
  end
  # Recursive fold: applies the head event to the accumulated DeliveryHistory.
  # The transportation status transition is delegated to Shipping.next_trans_status/2;
  # a "LOAD" event places the cargo on the voyage's vessel, any other event type
  # sets the location to the event's location.
  defp update_delivery_history([%HandlingEvent{
    type: type, location: location, voyage: voyage} | handling_events],
    %DeliveryHistory{
      transportation_status: trans_status} = delivery_history) do
    new_trans_status = Shipping.next_trans_status(type, trans_status)
    new_location = if type == "LOAD", do: "ON VESSEL VOYAGE #{voyage}", else: location
    update_delivery_history(handling_events,
                      %{delivery_history | :transportation_status => new_trans_status,
                                            :location => new_location})
  end
  # Base case: no events left — the accumulated history is the final result.
  defp update_delivery_history([], delivery_history) do
    delivery_history
  end
end
| 30.722222 | 94 | 0.681736 |
ff6818f57cacff53e58dc12c538f1afb3d1ab155 | 2,063 | ex | Elixir | lib/blog_web/controllers/post_controller.ex | kadmohardy/Blog | 755bfc4325c899166b7c865a60060fc4355e7d15 | [
"MIT"
] | null | null | null | lib/blog_web/controllers/post_controller.ex | kadmohardy/Blog | 755bfc4325c899166b7c865a60060fc4355e7d15 | [
"MIT"
] | null | null | null | lib/blog_web/controllers/post_controller.ex | kadmohardy/Blog | 755bfc4325c899166b7c865a60060fc4355e7d15 | [
"MIT"
defmodule BlogWeb.PostController do
  @moduledoc """
  CRUD controller for blog posts.

  All write actions require an authenticated user (`BlogWeb.Plug.RequireAuth`),
  and edit/update/delete additionally require the current user to own the post
  (`check_owner/2`).
  """
  use BlogWeb, :controller

  alias Blog.{Posts, Posts.Post}

  plug BlogWeb.Plug.RequireAuth when action in [:create, :new, :edit, :update, :delete]
  plug :check_owner when action in [:edit, :update, :delete]

  # Lists only the posts that belong to the authenticated user.
  def index(conn, _params) do
    posts = Posts.list_posts(conn.assigns[:user].id)
    render(conn, "index.html", posts: posts)
  end

  def show(conn, %{"id" => id}) do
    post = Posts.get_post!(id)
    render(conn, "show.html", post: post)
  end

  def new(conn, _params) do
    changeset = Post.changeset(%Post{})
    render(conn, "new.html", changeset: changeset)
  end

  def edit(conn, %{"id" => id}) do
    post = Posts.get_post!(id)
    changeset = Post.changeset(post)
    render(conn, "edit.html", post: post, changeset: changeset)
  end

  def create(conn, %{"post" => post}) do
    case Posts.create_post(conn.assigns[:user], post) do
      {:ok, post} ->
        conn
        # Fixed typo: "successfuly" -> "successfully".
        |> put_flash(:info, "Post created successfully!")
        |> redirect(to: Routes.post_path(conn, :show, post))

      {:error, changeset} ->
        render(conn, "new.html", changeset: changeset)
    end
  end

  def delete(conn, %{"id" => id}) do
    # NOTE(review): the result of delete_post/1 is ignored, so a failed delete
    # still flashes success — confirm Posts.delete_post/1 raises on failure.
    Posts.delete_post(id)

    conn
    |> put_flash(:info, "Post foi deletado")
    |> redirect(to: Routes.post_path(conn, :index))
  end

  def update(conn, %{"id" => id, "post" => post_params}) do
    post = Posts.get_post!(id)

    case Posts.update_post(post, post_params) do
      {:ok, post} ->
        conn
        # Fixed copy/paste bug: this action previously flashed "Post created".
        |> put_flash(:info, "Post updated successfully!")
        |> redirect(to: Routes.post_path(conn, :show, post))

      {:error, changeset} ->
        render(conn, "edit.html", changeset: changeset, post: post)
    end
  end

  # Plug: halts with a flash + redirect unless the post in the URL belongs to
  # the authenticated user.
  def check_owner(conn, _) do
    %{params: %{"id" => post_id}} = conn

    if Posts.get_post!(post_id).user_id == conn.assigns.user.id do
      conn
    else
      conn
      |> put_flash(:info, "Você não possui permissão para esta operação")
      |> redirect(to: Routes.post_path(conn, :index))
      |> halt()
    end
  end
end
| 26.792208 | 87 | 0.615124 |
ff68259813ace3de5fbec536515bf3ae4ce44844 | 489 | ex | Elixir | lib/devito_web/views/error_view.ex | supersimple/devito | 9b976849d934a9551450ca0459859e76c0929893 | [
"Apache-2.0"
] | 37 | 2020-07-23T16:05:32.000Z | 2022-02-04T07:06:17.000Z | lib/devito_web/views/error_view.ex | supersimple/devito | 9b976849d934a9551450ca0459859e76c0929893 | [
"Apache-2.0"
] | 1 | 2020-08-03T03:58:57.000Z | 2020-08-05T03:06:37.000Z | lib/devito_web/views/error_view.ex | supersimple/devito | 9b976849d934a9551450ca0459859e76c0929893 | [
"Apache-2.0"
] | 3 | 2020-07-24T22:43:08.000Z | 2021-05-30T17:43:45.000Z | defmodule DevitoWeb.ErrorView do
  @moduledoc """
  Renders error responses. Relies on Phoenix's default behaviour of deriving
  the response body from the template name (e.g. "404.html" -> "Not Found").
  """
  use DevitoWeb, :view

  # If you want to customize a particular status code
  # for a certain format, you may uncomment below.
  # def render("500.html", _assigns) do
  #   "Internal Server Error"
  # end

  # By default, Phoenix returns the status message from
  # the template name. For example, "404.html" becomes
  # "Not Found".
  def template_not_found(template, _assigns) do
    Phoenix.Controller.status_message_from_template(template)
  end
end
| 28.764706 | 61 | 0.734151 |
ff684137b317bbebf425bba2e961948e3aea63ef | 8,388 | ex | Elixir | lib/live_sup_web/live/widgets/blameless/blameless_current_incidents_live.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | lib/live_sup_web/live/widgets/blameless/blameless_current_incidents_live.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | 3 | 2022-02-23T15:51:48.000Z | 2022-03-14T22:52:43.000Z | lib/live_sup_web/live/widgets/blameless/blameless_current_incidents_live.ex | livesup-dev/livesup | eaf9ffc78d3043bd9e3408f0f4df26ed16eb8446 | [
"Apache-2.0",
"MIT"
] | null | null | null | defmodule LiveSupWeb.Live.Widgets.Blameless.CurrentIncidentsLive do
  @moduledoc """
  Dashboard widget that lists currently open Blameless incidents: description,
  age, severity, commander, status and the Slack war-room link. Shows a
  placeholder graphic when `widget_data.data` is empty.
  """
  use LiveSupWeb.Live.Widgets.WidgetLive

  # Renders the widget body inside a SmartRenderComponent wrapper with the
  # shared header/footer components. `widget_data.data` is iterated as a list
  # of incident maps. (No comments inside the ~H sigil: HTML comments would be
  # emitted to the client.)
  @impl true
  def render_widget(assigns) do
    ~H"""
    <.live_component module={SmartRenderComponent} id={@widget_data.id} let={widget_data} widget_data={@widget_data}>
      <!-- Current Incidents -->
      <.live_component module={WidgetHeaderComponent} id={"#{widget_data.id}-header"} widget_data={widget_data} />
      <!-- Widget Content -->
      <div class="min-h-[132px] items-center rounded-md bg-white p-2 dark:bg-darker">
        <%= if Enum.any?(widget_data.data) do %>
          <%= for {incident, _counter} <- Enum.with_index(widget_data.data) do %>
            <div class="relative shadow-md shadow-red-500 ring-1 ring-red-500 rounded-lg p-4 my-4">
              <span class="absolute -right-1 -top-1 flex h-3 w-3">
                <span class="animate-ping absolute inline-flex h-full w-full rounded-full bg-red-400 opacity-75"></span>
                <span class="relative inline-flex rounded-full h-3 w-3 bg-red-500"></span>
              </span>
              <h3 class="text-lg mb-4">
                <a href={incident[:url]} class="hover:underline text-black dark:text-primary block" target="_blank"><%= incident[:description] %></a>
                <span class="font-mono text-xs text-black dark:text-white block"><%= incident[:created_at_ago] %></span>
              </h3>
              <div class="flex flex-row gap-4">
                <div class="flex flex-col gap-4 w-1/2">
                  <div>
                    <p class="uppercase font-medium text-sm text-gray-500">Severity</p>
                    <p class="text-base">
                      <%= incident[:severity] %>
                    </p>
                  </div>
                  <div>
                    <p class="uppercase font-medium text-sm text-gray-500">Commander</p>
                    <%= if incident.commander && incident.commander[:full_name] do %>
                      <div class="flex">
                        <img src={incident.commander[:avatar_url]} class="w-6 h-6 rounded-full transition-opacity duration-200 flex-none mr-2"/>
                        <p class="text-base flex-grow">
                          <span class="block"><%= incident.commander[:full_name] %></span>
                          <span class="text-xs block">(<%= incident.commander[:title] %>)</span>
                        </p>
                      </div>
                    <% else %>
                      <p class="text-base">N/A</p>
                    <% end %>
                  </div>
                </div>
                <div class="flex flex-col gap-4 w-1/2">
                  <div>
                    <p class="uppercase font-medium text-sm text-gray-500">Status</p>
                    <p class="text-base">
                      <svg id="emYLh5A2QIZ1" class="w-3 h-3 inline " xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 3 3" shape-rendering="geometricPrecision" text-rendering="geometricPrecision"><ellipse rx="1.5" ry="1.5" transform="translate(1.5 1.5)" class={"fill-yellow-300 fill-incident-#{String.downcase(incident[:status])}"} stroke-width="0"/></svg>
                      <span class="text-base align-middle"><%= incident[:status] %></span>
                    </p>
                  </div>
                  <div>
                    <p class="uppercase font-medium text-sm text-gray-500">War Room</p>
                    <p class="text-base">
                      <%= if incident.slack && incident.slack[:url] do %>
                        <span class="font-mono text-sm text-black dark:text-white block">
                          <a href={incident.slack[:url]} target="_blank" class="hover:underline text-black dark:text-primary inline-block">
                            <svg class="w-4 h-4 rounded-full inline-block mr-2" version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
                              viewBox="60 60 140 140" style="enable-background:new 60 60 140 140; display:inline-block;" xml:space="preserve">
                              <style type="text/css">
                                .st0{fill:#E01E5A;}
                                .st1{fill:#36C5F0;}
                                .st2{fill:#2EB67D;}
                                .st3{fill:#ECB22E;}
                              </style>
                              <g>
                                <g>
                                  <path class="st0" d="M99.4,151.2c0,7.1-5.8,12.9-12.9,12.9c-7.1,0-12.9-5.8-12.9-12.9c0-7.1,5.8-12.9,12.9-12.9h12.9V151.2z"/>
                                  <path class="st0" d="M105.9,151.2c0-7.1,5.8-12.9,12.9-12.9s12.9,5.8,12.9,12.9v32.3c0,7.1-5.8,12.9-12.9,12.9
                                    s-12.9-5.8-12.9-12.9V151.2z"/>
                                </g>
                                <g>
                                  <path class="st1" d="M118.8,99.4c-7.1,0-12.9-5.8-12.9-12.9c0-7.1,5.8-12.9,12.9-12.9s12.9,5.8,12.9,12.9v12.9H118.8z"/>
                                  <path class="st1" d="M118.8,105.9c7.1,0,12.9,5.8,12.9,12.9s-5.8,12.9-12.9,12.9H86.5c-7.1,0-12.9-5.8-12.9-12.9
                                    s5.8-12.9,12.9-12.9H118.8z"/>
                                </g>
                                <g>
                                  <path class="st2" d="M170.6,118.8c0-7.1,5.8-12.9,12.9-12.9c7.1,0,12.9,5.8,12.9,12.9s-5.8,12.9-12.9,12.9h-12.9V118.8z"/>
                                  <path class="st2" d="M164.1,118.8c0,7.1-5.8,12.9-12.9,12.9c-7.1,0-12.9-5.8-12.9-12.9V86.5c0-7.1,5.8-12.9,12.9-12.9
                                    c7.1,0,12.9,5.8,12.9,12.9V118.8z"/>
                                </g>
                                <g>
                                  <path class="st3" d="M151.2,170.6c7.1,0,12.9,5.8,12.9,12.9c0,7.1-5.8,12.9-12.9,12.9c-7.1,0-12.9-5.8-12.9-12.9v-12.9H151.2z"/>
                                  <path class="st3" d="M151.2,164.1c-7.1,0-12.9-5.8-12.9-12.9c0-7.1,5.8-12.9,12.9-12.9h32.3c7.1,0,12.9,5.8,12.9,12.9
                                    c0,7.1-5.8,12.9-12.9,12.9H151.2z"/>
                                </g>
                              </g>
                            </svg><%= incident.slack[:channel] %>
                          </a>
                        </span>
                      <% end %>
                    </p>
                  </div>
                </div>
              </div>
            </div>
          <% end %>
        <% else %>
          <svg class="h-20 w-20 m-auto" viewBox="0 0 81 80" fill="none" xmlns="http://www.w3.org/2000/svg">
            <path d="M66.5 34.6667C66.8333 36.3334 67.1667 38.3334 67.1667 40.0001C67.1667 54.6668 55.1667 66.6668 40.5 66.6668C25.8333 66.6668 13.8333 54.6668 13.8333 40.0001C13.8333 25.3334 25.8333 13.3334 40.5 13.3334C45.8333 13.3334 51.1667 15.0001 55.1667 17.6667L59.8333 13.0001C54.5 9.00008 47.8333 6.66675 40.5 6.66675C22.1667 6.66675 7.16666 21.6667 7.16666 40.0001C7.16666 58.3334 22.1667 73.3334 40.5 73.3334C58.8333 73.3334 73.8333 58.3334 73.8333 40.0001C73.8333 36.3334 73.1667 32.6667 72.1667 29.3334L66.5 34.6667Z" fill="#21D3EE"/>
            <path d="M37.1667 54.6667L21.5 39L26.1667 34.3333L37.1667 45.3333L71.5 11L76.1667 15.6667L37.1667 54.6667Z" fill="#21D3EE"/>
            <path opacity="0.3" d="M40.5 6.66675C31.6594 6.66675 23.181 10.1786 16.9298 16.4299C10.6786 22.6811 7.16666 31.1595 7.16666 40.0001C7.16666 48.8406 10.6786 57.3191 16.9298 63.5703C23.181 69.8215 31.6594 73.3334 40.5 73.3334C49.3405 73.3334 57.819 69.8215 64.0702 63.5703C70.3214 57.3191 73.8333 48.8406 73.8333 40.0001C73.8333 31.1595 70.3214 22.6811 64.0702 16.4299C57.819 10.1786 49.3405 6.66675 40.5 6.66675Z" fill="#21D3EE"/>
          </svg>
          <p class="text-center m-2">No Incidents to Display</p>
        <% end %>
      </div>
      <!-- /Widget Content -->
      <.live_component module={WidgetFooterComponent} id={"#{widget_data.id}-footer"} widget_data={widget_data} />
      <!-- /Current Incidents -->
    </.live_component>
    """
  end
end
| 70.487395 | 549 | 0.487601 |
ff6868353faa7b181fd75c023958ecdc3498ae64 | 1,902 | ex | Elixir | clients/content/lib/google_api/content/v2/model/product_amount.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/product_amount.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/product_amount.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Model.ProductAmount do
  @moduledoc """
  A price/tax triple attached to an order line.

  ## Attributes

  - priceAmount (Price): The pre-tax or post-tax price depending on the location of the order. Defaults to: `null`.
  - remittedTaxAmount (Price): Remitted tax value. Defaults to: `null`.
  - taxAmount (Price): Tax value. Defaults to: `null`.
  """

  # Auto-generated model: ModelBase provides the struct and JSON (de)serialization
  # plumbing for the `field` declarations below. Do not edit by hand.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :priceAmount => GoogleApi.Content.V2.Model.Price.t(),
          :remittedTaxAmount => GoogleApi.Content.V2.Model.Price.t(),
          :taxAmount => GoogleApi.Content.V2.Model.Price.t()
        }

  # Each field decodes into a nested Price model.
  field(:priceAmount, as: GoogleApi.Content.V2.Model.Price)
  field(:remittedTaxAmount, as: GoogleApi.Content.V2.Model.Price)
  field(:taxAmount, as: GoogleApi.Content.V2.Model.Price)
end
# Poison integration (auto-generated): delegate decoding to the model's own
# decode/2 so nested Price fields are materialized as structs.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.ProductAmount do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.ProductAmount.decode(value, options)
  end
end

# Encoding is uniform across generated models and handled by ModelBase.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.ProductAmount do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.222222 | 115 | 0.736067 |
ff687ac135e0cf47b2eae042e3bd6ac459901d73 | 6,827 | ex | Elixir | lib/mix/tasks/hydrate_clickhouse.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | lib/mix/tasks/hydrate_clickhouse.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | lib/mix/tasks/hydrate_clickhouse.ex | lizlam/plausible | 886ba62cd814e5ca2d05c51a375bccc753c7c6ff | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.HydrateClickhouse do
use Mix.Task
use Plausible.Repo
require Logger
@hash_key Keyword.fetch!(Application.get_env(:plausible, PlausibleWeb.Endpoint), :secret_key_base) |> binary_part(0, 16)
def run(args) do
Application.ensure_all_started(:db_connection)
Application.ensure_all_started(:hackney)
clickhouse_config = Application.get_env(:plausible, :clickhouse)
Clickhousex.start_link(Keyword.merge([scheme: :http, port: 8123, name: :clickhouse], clickhouse_config))
Ecto.Migrator.with_repo(Plausible.Repo, fn repo ->
execute(repo, args)
end)
end
def execute(repo, _args \\ []) do
create_events()
create_sessions()
hydrate_events(repo)
end
def create_events() do
ddl = """
CREATE TABLE IF NOT EXISTS events (
timestamp DateTime,
name String,
domain String,
user_id UInt64,
session_id UInt64,
hostname String,
pathname String,
referrer String,
referrer_source String,
initial_referrer String,
initial_referrer_source String,
country_code LowCardinality(FixedString(2)),
screen_size LowCardinality(String),
operating_system LowCardinality(String),
browser LowCardinality(String)
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(timestamp)
ORDER BY (name, domain, user_id, timestamp)
SETTINGS index_granularity = 8192
"""
Clickhousex.query(:clickhouse, ddl, [])
|> log
end
def create_sessions() do
ddl = """
CREATE TABLE IF NOT EXISTS sessions (
session_id UInt64,
sign Int8,
domain String,
user_id UInt64,
hostname String,
timestamp DateTime,
start DateTime,
is_bounce UInt8,
entry_page String,
exit_page String,
pageviews Int32,
events Int32,
duration UInt32,
referrer String,
referrer_source String,
country_code LowCardinality(FixedString(2)),
screen_size LowCardinality(String),
operating_system LowCardinality(String),
browser LowCardinality(String)
) ENGINE = CollapsingMergeTree(sign)
PARTITION BY toYYYYMM(start)
ORDER BY (domain, user_id, session_id, start)
SETTINGS index_granularity = 8192
"""
Clickhousex.query(:clickhouse, ddl, [])
|> log
end
def chunk_query(queryable, chunk_size, repo) do
chunk_stream = Stream.unfold(0, fn page_number ->
offset = chunk_size * page_number
page = from(
q in queryable,
offset: ^offset,
limit: ^chunk_size
) |> repo.all(timeout: :infinity)
{page, page_number + 1}
end)
Stream.take_while(chunk_stream, fn [] -> false; _ -> true end)
end
def escape_quote(s) do
String.replace(s, "'", "''")
end
def hydrate_events(repo, _args \\ []) do
end_time = ~N[2020-05-21 10:46:51]
total = Repo.aggregate(from(e in Plausible.Event, where: e.timestamp < ^end_time), :count, :id)
event_chunks = from(
e in Plausible.Event,
where: e.timestamp < ^end_time,
order_by: e.id
) |> chunk_query(50_000, repo)
Enum.reduce(event_chunks, {%{}, 0}, fn events, {session_cache, processed_events} ->
{session_cache, sessions, events} = Enum.reduce(events, {session_cache, [], []}, fn event, {session_cache, sessions, new_events} ->
found_session = session_cache[event.fingerprint]
active = is_active?(found_session, event)
user_id = SipHash.hash!(@hash_key, event.fingerprint)
clickhouse_event = struct(Plausible.ClickhouseEvent, Map.from_struct(event) |> Map.put(:user_id, user_id))
cond do
found_session && active ->
new_session = update_session(found_session, clickhouse_event)
{
Map.put(session_cache, event.fingerprint, new_session),
[%{new_session | sign: 1}, %{found_session | sign: -1} | sessions],
new_events ++ [%{clickhouse_event | session_id: new_session.session_id}]
}
found_session && !active ->
new_session = new_session_from_event(clickhouse_event)
{
Map.put(session_cache, event.fingerprint, new_session),
[new_session | sessions],
new_events ++ [%{clickhouse_event | session_id: new_session.session_id}]
}
true ->
new_session = new_session_from_event(clickhouse_event)
{
Map.put(session_cache, event.fingerprint, new_session),
[new_session | sessions],
new_events ++ [%{clickhouse_event | session_id: new_session.session_id}]
}
end
end)
Plausible.Clickhouse.insert_events(events)
Plausible.Clickhouse.insert_sessions(sessions)
session_cache = clean(session_cache, List.last(events).timestamp)
new_processed_count = processed_events + Enum.count(events)
IO.puts("Processed #{new_processed_count} out of #{total} (#{round(new_processed_count / total * 100)}%)")
{session_cache, processed_events + Enum.count(events)}
end)
end
defp clean(session_cache, latest_timestamp) do
cleaned = Enum.reduce(session_cache, %{}, fn {key, session}, acc ->
if Timex.diff(latest_timestamp, session.timestamp, :second) <= 3600 do
Map.put(acc, key, session)
else
acc # forget the session
end
end)
n_old = Enum.count(session_cache)
n_new = Enum.count(cleaned)
IO.puts("Removed #{n_old - n_new} sessions from store")
cleaned
end
defp is_active?(session, event) do
session && Timex.diff(event.timestamp, session.timestamp, :minute) <= 29
end
defp update_session(session, event) do
%{session | timestamp: event.timestamp, exit_page: event.pathname, is_bounce: false, duration: Timex.diff(event.timestamp, session.start, :second), pageviews: (if event.name == "pageview", do: session.pageviews + 1, else: session.pageviews), events: session.events + 1}
end
defp new_session_from_event(event) do
%Plausible.ClickhouseSession{
sign: 1,
session_id: Plausible.ClickhouseSession.random_uint64(),
hostname: event.hostname,
domain: event.domain,
user_id: event.user_id,
entry_page: event.pathname,
exit_page: event.pathname,
is_bounce: true,
duration: 0,
pageviews: (if event.name == "pageview", do: 1, else: 0),
events: 1,
referrer: event.referrer,
referrer_source: event.referrer_source,
country_code: event.country_code,
operating_system: event.operating_system,
browser: event.browser,
timestamp: event.timestamp,
start: event.timestamp
}
end
defp log({:ok, res}), do: Logger.info("#{inspect res}")
defp log({:error, e}), do: Logger.error("[ERROR] #{inspect e}")
end
| 33.79703 | 273 | 0.6549 |
ff68bc82dbc05db836e36362f4b7ef71e5b57244 | 437 | ex | Elixir | lib/history/sources/gdax.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 20 | 2021-08-06T01:09:48.000Z | 2022-03-28T18:44:56.000Z | lib/history/sources/gdax.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 13 | 2021-08-21T21:17:02.000Z | 2022-03-27T06:33:51.000Z | lib/history/sources/gdax.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
defmodule History.Sources.Gdax do
  @moduledoc """
  `History.DataAdapter` implementation for the GDAX venue. No historical feeds
  are wired up here, so every callback resolves to its no-op source module.
  """

  alias History.Sources.NoOp

  @behaviour History.DataAdapter

  @impl true
  def trades do
    NoOp.Trades
  end

  @impl true
  def candles do
    NoOp.Candles
  end

  @impl true
  def liquidations do
    NoOp.Liquidations
  end

  @impl true
  def funding_rates do
    NoOp.FundingRates
  end

  @impl true
  def predicted_funding_rates do
    NoOp.PredictedFundingRates
  end

  @impl true
  def lending_rates do
    NoOp.LendingRates
  end
end
ff68fb66df1a2b07c949179a5019e8e4cf4678fe | 80 | exs | Elixir | test/reactor_web/views/page_view_test.exs | jeantsai/phoenix-react-starter | dd0e048285134492dad160f1f2a8d942648acba9 | [
"MIT"
] | null | null | null | test/reactor_web/views/page_view_test.exs | jeantsai/phoenix-react-starter | dd0e048285134492dad160f1f2a8d942648acba9 | [
"MIT"
] | null | null | null | test/reactor_web/views/page_view_test.exs | jeantsai/phoenix-react-starter | dd0e048285134492dad160f1f2a8d942648acba9 | [
"MIT"
] | null | null | null | defmodule ReactorWeb.PageViewTest do
  # Placeholder test module: pulling in ConnCase verifies the view stack
  # compiles; no explicit assertions yet. `async: true` is safe here since
  # nothing touches shared state.
  use ReactorWeb.ConnCase, async: true
end
| 20 | 38 | 0.825 |
ff6970fa0f3ec1ddcbe9d36b9e5b7cdc33b190da | 82 | ex | Elixir | lib/web/views/manage/hosted_site_view.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | lib/web/views/manage/hosted_site_view.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | lib/web/views/manage/hosted_site_view.ex | sb8244/grapevine | effaaa01294d30114090c20f9cc40b8665d834f2 | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule Web.Manage.HostedSiteView do
  @moduledoc """
  View for the hosted-site management pages; aliases FormView so its helpers
  are available in templates.
  """
  use Web, :view

  alias Web.FormView
end
| 13.666667 | 38 | 0.768293 |
ff697e5a7d01c11285cf54ce137cf27c17f887e4 | 291 | ex | Elixir | bot/lib/bot.ex | hectorip/Facebook_Bot_Elixir | 097967c985f2b48c7fc9d67979fed9b4821c888b | [
"Apache-2.0"
] | 1 | 2016-05-25T04:39:09.000Z | 2016-05-25T04:39:09.000Z | bot/lib/bot.ex | hectorip/Facebook_Bot_Elixir | 097967c985f2b48c7fc9d67979fed9b4821c888b | [
"Apache-2.0"
] | null | null | null | bot/lib/bot.ex | hectorip/Facebook_Bot_Elixir | 097967c985f2b48c7fc9d67979fed9b4821c888b | [
"Apache-2.0"
] | null | null | null | defmodule Bot do
  @moduledoc """
  OTP application entry point: supervises a single `Bot.Server` worker.
  """
  use Application

  # Called by the runtime on application boot.
  def start(_type, _args) do
    # NOTE(review): `Supervisor.Spec` (and `worker/2`) has been deprecated since
    # Elixir 1.5 — consider migrating to a child-spec tuple once it's confirmed
    # that Bot.Server exposes `child_spec/1` (e.g. via `use GenServer`).
    import Supervisor.Spec, warn: false

    children = [
      # The listening port is read from the PORT environment variable at boot.
      worker(Bot.Server, [System.get_env("PORT")])
    ]

    opts = [strategy: :one_for_one, name: Bot.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 19.4 | 57 | 0.670103 |
ff698554456ad2247d08613bd7aa089048116cf2 | 1,013 | ex | Elixir | test/support/channel_case.ex | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | defmodule Quickquill.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.
  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build and query models.
  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """
  use ExUnit.CaseTemplate

  # Code injected into every module that does `use Quickquill.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest
      alias Quickquill.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      # The default endpoint for testing
      @endpoint Quickquill.Endpoint
    end
  end

  # Checks out a sandboxed DB connection per test; for non-async tests the
  # connection is shared with any process the test spawns.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Quickquill.Repo)
    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Quickquill.Repo, {:shared, self()})
    end
    :ok
  end
end
| 23.022727 | 72 | 0.704837 |
ff69959173e02ad2807a8ab7c871a08849c7a4b5 | 765 | exs | Elixir | test/interactors/validate_password_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 750 | 2015-01-18T23:00:36.000Z | 2021-03-24T22:11:09.000Z | test/interactors/validate_password_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 130 | 2015-01-19T12:39:42.000Z | 2021-09-28T22:40:52.000Z | test/interactors/validate_password_test.exs | moxley/addict | 9271c60d9a862edcefc31e8a764b3eb5a5905171 | [
"MIT"
] | 151 | 2015-01-19T09:24:44.000Z | 2020-09-21T13:52:46.000Z | defmodule ValidatePasswordTest do
  alias Addict.Interactors.ValidatePassword
  use ExUnit.Case, async: true

  # Minimal in-test Ecto schema so a real changeset can be built without
  # depending on the host application's user model.
  defmodule Addict.PasswordUser do
    use Ecto.Schema
    schema "users" do
      field :password, :string
      field :email, :string
    end
  end

  # A sufficiently long password yields no validation errors.
  # NOTE(review): `Ecto.Changeset.cast/4` with a trailing opts list is the
  # legacy signature — consider cast/3 on upgrade.
  test "it passes on happy path" do
    changeset = %Addict.PasswordUser{} |> Ecto.Changeset.cast(%{password: "one passphrase"}, ~w(password),[])
    {:ok, errors} = ValidatePassword.call(changeset, [])
    assert errors == []
  end

  # With no custom strategies ([]), the default minimum-length rule rejects
  # a 3-character password.
  test "it validates the default use case" do
    changeset = %Addict.PasswordUser{} |> Ecto.Changeset.cast(%{password: "123"}, ~w(password),[])
    {:error, errors} = ValidatePassword.call(changeset, [])
    assert errors == [password: {"is too short", []}]
  end
end
| 29.423077 | 109 | 0.669281 |
ff69b90a5804a8c99053142e3bf60a46060aa042 | 75 | ex | Elixir | lib/example_app_web/views/admin/layout_view.ex | elixirasturias/example-app | d68d3c141446cf81a9181198b22d51a41b26ce71 | [
"MIT"
] | 3 | 2018-05-31T13:06:48.000Z | 2020-01-14T03:29:30.000Z | lib/example_app_web/views/admin/layout_view.ex | elixirasturias/example-app | d68d3c141446cf81a9181198b22d51a41b26ce71 | [
"MIT"
] | null | null | null | lib/example_app_web/views/admin/layout_view.ex | elixirasturias/example-app | d68d3c141446cf81a9181198b22d51a41b26ce71 | [
"MIT"
] | null | null | null | defmodule ExampleAppWeb.Admin.LayoutView do
use ExampleAppWeb, :view
end
| 18.75 | 43 | 0.826667 |
ff6a16aace8390405962bd03b9204ebc224a5083 | 601 | exs | Elixir | examples/multiple_bars.exs | kianmeng/owl | 55c0f31438866d79526d1feab44e23728ad88dd6 | [
"Apache-2.0"
] | 123 | 2021-12-15T22:42:04.000Z | 2022-03-30T22:21:29.000Z | examples/multiple_bars.exs | kianmeng/owl | 55c0f31438866d79526d1feab44e23728ad88dd6 | [
"Apache-2.0"
] | 2 | 2021-12-16T16:42:24.000Z | 2022-01-25T23:47:17.000Z | examples/multiple_bars.exs | kianmeng/owl | 55c0f31438866d79526d1feab44e23728ad88dd6 | [
"Apache-2.0"
] | 1 | 2022-01-25T14:02:40.000Z | 2022-01-25T14:02:40.000Z | 1..10
|> Enum.map(fn index ->
Task.async(fn ->
range = 1..Enum.random(100..300)
label = "Demo Progress ##{index}"
Owl.ProgressBar.start(
id: {:demo, index},
label: label,
total: range.last,
timer: true,
bar_width_ratio: 0.3,
filled_symbol: Owl.Data.tag("#", :red),
empty_symbol: Owl.Data.tag("-", :light_black),
partial_symbols: []
)
Enum.each(range, fn _ ->
Process.sleep(Enum.random(10..50))
Owl.ProgressBar.inc(id: {:demo, index})
end)
end)
end)
|> Task.await_many(:infinity)
Owl.LiveScreen.await_render()
| 21.464286 | 52 | 0.585691 |
ff6a324d1a9f126bf3469aaec2bf1c1d21c6a9fa | 323 | exs | Elixir | config/osd32mp1.exs | trarbr/nerves_livebook | ac5a5f7f8b80fb0c63cfe81e565c439e912973dc | [
"Apache-2.0"
] | 51 | 2021-09-21T12:23:41.000Z | 2022-03-31T08:37:17.000Z | config/osd32mp1.exs | trarbr/nerves_livebook | ac5a5f7f8b80fb0c63cfe81e565c439e912973dc | [
"Apache-2.0"
] | 37 | 2021-09-21T11:35:28.000Z | 2022-03-18T13:00:31.000Z | config/osd32mp1.exs | trarbr/nerves_livebook | ac5a5f7f8b80fb0c63cfe81e565c439e912973dc | [
"Apache-2.0"
] | 7 | 2021-09-26T22:33:35.000Z | 2022-02-20T10:59:29.000Z | import Config
# Configure the network using vintage_net
# See https://github.com/nerves-networking/vintage_net for more information
config :vintage_net,
config: [
{"usb0", %{type: VintageNetDirect}},
{"eth0", %{type: VintageNetEthernet, ipv4: %{method: :dhcp}}}
]
# TBD
config :nerves_livebook, :ui, led: nil
| 24.846154 | 75 | 0.705882 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.