| code | path | quality_prob | learning_prob | filename | kind |
|---|---|---|---|---|---|
defmodule AWS.WorkMail do
@moduledoc """
Amazon WorkMail is a secure, managed business email and calendaring service with
support for existing desktop and mobile email clients.
You can access your email, contacts, and calendars using Microsoft Outlook, your
browser, or other native iOS and Android email applications. You can integrate
WorkMail with your existing corporate directory and control both the keys that
encrypt your data and the location in which your data is stored.
The WorkMail API is designed for the following scenarios:
* Listing and describing organizations
* Managing users
* Managing groups
* Managing resources
All WorkMail API operations are Amazon-authenticated and certificate-signed.
They not only require the use of the AWS SDK, but also allow for the exclusive
use of AWS Identity and Access Management users and roles to help facilitate
access, trust, and permission policies. By creating a role and allowing an IAM
user to access the WorkMail site, the IAM user gains full administrative
visibility into the entire WorkMail organization (or as set in the IAM policy).
This includes, but is not limited to, the ability to create, update, and delete
users, groups, and resources. This allows developers to perform the scenarios
listed above, as well as give users the ability to grant access on a selective
basis using the IAM model.
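## Example

A minimal, hypothetical call sketch. The client shape follows the
`build_host/2` and `build_url/2` helpers at the bottom of this module; the
credential fields are assumptions about what `AWS.Request.sign_v4/5` expects:

    client = %{
      region: "us-east-1",
      endpoint: "amazonaws.com",
      proto: "https",
      port: 443,
      access_key_id: "...",
      secret_access_key: "..."
    }

    {:ok, body, _response} =
      AWS.WorkMail.list_users(client, %{"OrganizationId" => "m-organization-id"})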
"""
@doc """
Adds a member (user or group) to the resource's set of delegates.
"""
def associate_delegate_to_resource(client, input, options \\ []) do
request(client, "AssociateDelegateToResource", input, options)
end
@doc """
Adds a member (user or group) to the group's set.
"""
def associate_member_to_group(client, input, options \\ []) do
request(client, "AssociateMemberToGroup", input, options)
end
@doc """
Cancels a mailbox export job.
If the mailbox export job is near completion, it might not be possible to cancel
it.
"""
def cancel_mailbox_export_job(client, input, options \\ []) do
request(client, "CancelMailboxExportJob", input, options)
end
@doc """
Adds an alias to the set of a given member (user or group) of Amazon WorkMail.
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Creates a group that can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_group(client, input, options \\ []) do
request(client, "CreateGroup", input, options)
end
@doc """
Creates a new Amazon WorkMail resource.
"""
def create_resource(client, input, options \\ []) do
request(client, "CreateResource", input, options)
end
@doc """
Creates a user who can be used in Amazon WorkMail by calling the
`RegisterToWorkMail` operation.
"""
def create_user(client, input, options \\ []) do
request(client, "CreateUser", input, options)
end
@doc """
Deletes an access control rule for the specified WorkMail organization.
"""
def delete_access_control_rule(client, input, options \\ []) do
request(client, "DeleteAccessControlRule", input, options)
end
@doc """
Removes one or more specified aliases from a set of aliases for a given user.
"""
def delete_alias(client, input, options \\ []) do
request(client, "DeleteAlias", input, options)
end
@doc """
Deletes a group from Amazon WorkMail.
"""
def delete_group(client, input, options \\ []) do
request(client, "DeleteGroup", input, options)
end
@doc """
Deletes permissions granted to a member (user or group).
"""
def delete_mailbox_permissions(client, input, options \\ []) do
request(client, "DeleteMailboxPermissions", input, options)
end
@doc """
Deletes the specified resource.
"""
def delete_resource(client, input, options \\ []) do
request(client, "DeleteResource", input, options)
end
@doc """
Deletes the specified retention policy from the specified organization.
"""
def delete_retention_policy(client, input, options \\ []) do
request(client, "DeleteRetentionPolicy", input, options)
end
@doc """
Deletes a user from Amazon WorkMail and all subsequent systems.
Before you can delete a user, the user state must be `DISABLED`. Use the
`DescribeUser` action to confirm the user state.
Deleting a user is permanent and cannot be undone. WorkMail archives user
mailboxes for 30 days before they are permanently removed.
"""
def delete_user(client, input, options \\ []) do
request(client, "DeleteUser", input, options)
end
@doc """
Mark a user, group, or resource as no longer used in Amazon WorkMail.
This action disassociates the mailbox and schedules it for clean-up. WorkMail
keeps mailboxes for 30 days before they are permanently removed. The
functionality in the console is *Disable*.
"""
def deregister_from_work_mail(client, input, options \\ []) do
request(client, "DeregisterFromWorkMail", input, options)
end
@doc """
Returns the data available for the group.
"""
def describe_group(client, input, options \\ []) do
request(client, "DescribeGroup", input, options)
end
@doc """
Describes the current status of a mailbox export job.
"""
def describe_mailbox_export_job(client, input, options \\ []) do
request(client, "DescribeMailboxExportJob", input, options)
end
@doc """
Provides more information regarding a given organization based on its
identifier.
"""
def describe_organization(client, input, options \\ []) do
request(client, "DescribeOrganization", input, options)
end
@doc """
Returns the data available for the resource.
"""
def describe_resource(client, input, options \\ []) do
request(client, "DescribeResource", input, options)
end
@doc """
Provides information regarding the user.
"""
def describe_user(client, input, options \\ []) do
request(client, "DescribeUser", input, options)
end
@doc """
Removes a member from the resource's set of delegates.
"""
def disassociate_delegate_from_resource(client, input, options \\ []) do
request(client, "DisassociateDelegateFromResource", input, options)
end
@doc """
Removes a member from a group.
"""
def disassociate_member_from_group(client, input, options \\ []) do
request(client, "DisassociateMemberFromGroup", input, options)
end
@doc """
Gets the effects of an organization's access control rules as they apply to a
specified IPv4 address, access protocol action, or user ID.
"""
def get_access_control_effect(client, input, options \\ []) do
request(client, "GetAccessControlEffect", input, options)
end
@doc """
Gets the default retention policy details for the specified organization.
"""
def get_default_retention_policy(client, input, options \\ []) do
request(client, "GetDefaultRetentionPolicy", input, options)
end
@doc """
Requests a user's mailbox details for a specified organization and user.
"""
def get_mailbox_details(client, input, options \\ []) do
request(client, "GetMailboxDetails", input, options)
end
@doc """
Lists the access control rules for the specified organization.
"""
def list_access_control_rules(client, input, options \\ []) do
request(client, "ListAccessControlRules", input, options)
end
@doc """
Creates a paginated call to list the aliases associated with a given entity.
"""
def list_aliases(client, input, options \\ []) do
request(client, "ListAliases", input, options)
end
@doc """
Returns an overview of the members of a group.
Users and groups can be members of a group.
"""
def list_group_members(client, input, options \\ []) do
request(client, "ListGroupMembers", input, options)
end
@doc """
Returns summaries of the organization's groups.
"""
def list_groups(client, input, options \\ []) do
request(client, "ListGroups", input, options)
end
@doc """
Lists the mailbox export jobs started for the specified organization within the
last seven days.
"""
def list_mailbox_export_jobs(client, input, options \\ []) do
request(client, "ListMailboxExportJobs", input, options)
end
@doc """
Lists the mailbox permissions associated with a user, group, or resource
mailbox.
"""
def list_mailbox_permissions(client, input, options \\ []) do
request(client, "ListMailboxPermissions", input, options)
end
@doc """
Returns summaries of the customer's organizations.
"""
def list_organizations(client, input, options \\ []) do
request(client, "ListOrganizations", input, options)
end
@doc """
Lists the delegates associated with a resource.
Users and groups can be resource delegates and answer requests on behalf of the
resource.
"""
def list_resource_delegates(client, input, options \\ []) do
request(client, "ListResourceDelegates", input, options)
end
@doc """
Returns summaries of the organization's resources.
"""
def list_resources(client, input, options \\ []) do
request(client, "ListResources", input, options)
end
@doc """
Lists the tags applied to an Amazon WorkMail organization resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Returns summaries of the organization's users.
"""
def list_users(client, input, options \\ []) do
request(client, "ListUsers", input, options)
end
@doc """
Adds a new access control rule for the specified organization.
The rule allows or denies access to the organization for the specified IPv4
addresses, access protocol actions, and user IDs. Adding a new rule with the
same name as an existing rule replaces the older rule.
"""
def put_access_control_rule(client, input, options \\ []) do
request(client, "PutAccessControlRule", input, options)
end
@doc """
Sets permissions for a user, group, or resource.
This replaces any pre-existing permissions.
"""
def put_mailbox_permissions(client, input, options \\ []) do
request(client, "PutMailboxPermissions", input, options)
end
@doc """
Puts a retention policy to the specified organization.
"""
def put_retention_policy(client, input, options \\ []) do
request(client, "PutRetentionPolicy", input, options)
end
@doc """
Registers an existing and disabled user, group, or resource for Amazon WorkMail
use by associating a mailbox and calendaring capabilities.
It performs no change if the user, group, or resource is enabled and fails if
the user, group, or resource is deleted. This operation results in the
accumulation of costs. For more information, see
[Pricing](https://aws.amazon.com/workmail/pricing). The equivalent console
functionality for this operation is *Enable*.
Users can either be created by calling the `CreateUser` API operation or they
can be synchronized from your directory. For more information, see
`DeregisterFromWorkMail`.
"""
def register_to_work_mail(client, input, options \\ []) do
request(client, "RegisterToWorkMail", input, options)
end
@doc """
Allows the administrator to reset the password for a user.
"""
def reset_password(client, input, options \\ []) do
request(client, "ResetPassword", input, options)
end
@doc """
Starts a mailbox export job to export MIME-format email messages and calendar
items from the specified mailbox to the specified Amazon Simple Storage Service
(Amazon S3) bucket.
For more information, see [Exporting mailbox content](https://docs.aws.amazon.com/workmail/latest/adminguide/mail-export.html)
in the *Amazon WorkMail Administrator Guide*.
"""
def start_mailbox_export_job(client, input, options \\ []) do
request(client, "StartMailboxExportJob", input, options)
end
@doc """
Applies the specified tags to the specified Amazon WorkMail organization
resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Untags the specified tags from the specified Amazon WorkMail organization
resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates a user's current mailbox quota for a specified organization and user.
"""
def update_mailbox_quota(client, input, options \\ []) do
request(client, "UpdateMailboxQuota", input, options)
end
@doc """
Updates the primary email for a user, group, or resource.
The current email is moved into the list of aliases (or swapped between an
existing alias and the current primary email), and the email provided in the
input is promoted as the primary.
"""
def update_primary_email_address(client, input, options \\ []) do
request(client, "UpdatePrimaryEmailAddress", input, options)
end
@doc """
Updates data for the resource.
To have the latest information, it must be preceded by a `DescribeResource`
call. The dataset in the request should be the one expected when performing
another `DescribeResource` call.
"""
def update_resource(client, input, options \\ []) do
request(client, "UpdateResource", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "workmail"}
host = build_host("workmail", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "WorkMailService.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| lib/aws/generated/work_mail.ex | 0.72526 | 0.47993 | work_mail.ex | starcoder |
defmodule Crawly.Manager do
@moduledoc """
Crawler manager module
This module is responsible for spawning all processes related to
a given Crawler.
The manager spawns the following processes tree.
┌────────────────┐        ┌───────────────────┐
│ Crawly.Manager │───────>│ Crawly.ManagerSup │
└────────────────┘        └─────────┬─────────┘
                                    │
                                    │
               ┌────────────────────┤
               │                    │
               │                    │
      ┌────────▼───────┐   ┌────────▼────────┐
      │    Worker1     │   │     Worker2     │
      └────────┬───────┘   └────────┬────────┘
               │                    │
               │                    │
    ┌──────────▼───────┐ ┌──────────▼───────────┐
    │Crawly.DataStorage│ │Crawly.RequestsStorage│
    └──────────────────┘ └──────────────────────┘
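
Workers can also be added to a running spider at runtime, for example
(assuming `MySpider` is a started spider):

    iex> Crawly.Manager.add_workers(MySpider, 2)
    :ok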
"""
require Logger
@timeout 60_000
use GenServer
alias Crawly.{Engine, Utils}
@spec add_workers(module(), non_neg_integer()) ::
:ok | {:error, :spider_non_exist}
def add_workers(spider_name, num_of_workers) do
case Engine.get_manager(spider_name) do
{:error, reason} ->
{:error, reason}
pid ->
GenServer.cast(pid, {:add_workers, num_of_workers})
end
end
def start_link(spider_name) do
Logger.debug("Starting the manager for #{spider_name}")
GenServer.start_link(__MODULE__, spider_name)
end
@impl true
def init(spider_name) do
# Getting spider start urls
[start_urls: urls] = spider_name.init()
# Start DataStorage worker
{:ok, data_storage_pid} = Crawly.DataStorage.start_worker(spider_name)
Process.link(data_storage_pid)
# Start RequestsWorker for a given spider
{:ok, request_storage_pid} =
Crawly.RequestsStorage.start_worker(spider_name)
Process.link(request_storage_pid)
# Store start requests
requests = Enum.map(urls, fn url -> Crawly.Request.new(url) end)
:ok = Crawly.RequestsStorage.store(spider_name, requests)
# Start workers
num_workers =
Utils.get_settings(:concurrent_requests_per_domain, spider_name, 4)
worker_pids =
Enum.map(1..num_workers, fn _x ->
DynamicSupervisor.start_child(
spider_name,
{Crawly.Worker, [spider_name]}
)
end)
Logger.debug(
"Started #{Enum.count(worker_pids)} workers for #{spider_name}"
)
# Schedule basic service operations for given spider manager
timeout =
Utils.get_settings(:manager_operations_timeout, spider_name, @timeout)
tref = Process.send_after(self(), :operations, timeout)
{:ok,
%{name: spider_name, tref: tref, prev_scraped_cnt: 0, workers: worker_pids}}
end
@impl true
def handle_cast({:add_workers, num_of_workers}, state) do
Logger.info("Adding #{num_of_workers} workers for #{state.name}")
Enum.each(1..num_of_workers, fn _ ->
DynamicSupervisor.start_child(state.name, {Crawly.Worker, [state.name]})
end)
{:noreply, state}
end
@impl true
def handle_info(:operations, state) do
Process.cancel_timer(state.tref)
# Close spider if required items count was reached.
{:stored_items, items_count} = Crawly.DataStorage.stats(state.name)
delta = items_count - state.prev_scraped_cnt
Logger.info("Current crawl speed is: #{delta} items/min")
itemcount_limit =
:closespider_itemcount
|> Utils.get_settings(state.name)
|> maybe_convert_to_integer()
maybe_stop_spider_by_itemcount_limit(
state.name,
items_count,
itemcount_limit
)
# Close spider in case if it's not scraping items fast enough
closespider_timeout_limit =
:closespider_timeout
|> Utils.get_settings(state.name)
|> maybe_convert_to_integer()
maybe_stop_spider_by_timeout(
state.name,
delta,
closespider_timeout_limit
)
tref =
Process.send_after(
self(),
:operations,
Utils.get_settings(:manager_operations_timeout, state.name, @timeout)
)
{:noreply, %{state | tref: tref, prev_scraped_cnt: items_count}}
end
defp maybe_stop_spider_by_itemcount_limit(spider_name, current, limit)
when current > limit do
Logger.info(
"Stopping #{inspect(spider_name)}, closespider_itemcount achieved"
)
Crawly.Engine.stop_spider(spider_name, :itemcount_limit)
end
defp maybe_stop_spider_by_itemcount_limit(_, _, _), do: :ok
defp maybe_stop_spider_by_timeout(spider_name, current, limit)
when current < limit and is_integer(limit) do
Logger.info("Stopping #{inspect(spider_name)}, itemcount timeout achieved")
Crawly.Engine.stop_spider(spider_name, :itemcount_timeout)
end
defp maybe_stop_spider_by_timeout(_, _, _), do: :ok
defp maybe_convert_to_integer(value) when is_atom(value), do: value
defp maybe_convert_to_integer(value) when is_binary(value),
do: String.to_integer(value)
defp maybe_convert_to_integer(value) when is_integer(value), do: value
end
| lib/crawly/manager.ex | 0.738386 | 0.490175 | manager.ex | starcoder |
defmodule JSONC.Transcoder do
import JSONC.Parser
@doc """
Transcodes a JSONC document into a JSON document, raising in case of error.
"""
def transcode!(content) when is_binary(content) do
case transcode(content) do
{:ok, result} -> result
{:error, reason} -> raise reason
end
end
@doc """
Transcodes a JSONC document into a JSON document.
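
A sketch of the expected shape (whether unquoted keys and `//` comments are
accepted is up to `JSONC.Parser`; this input is an assumption):

    {:ok, json} = JSONC.Transcoder.transcode("{ a: 1 // a comment \n }")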
"""
def transcode(content) when is_binary(content) do
case parse(content) do
{:ok, %{type: :root, value: value}} ->
{:ok, encode_value(value)}
{:error, reason} ->
{:error, reason}
end
end
defp encode_value(value, level \\ 1) when is_map(value) do
case value do
%{type: :object, value: object_node} ->
"{\n#{String.duplicate(" ", level * 4)}#{encode_object(object_node, level)}\n#{String.duplicate(" ", (level - 1) * 4)}}"
%{type: :array, value: array_node} ->
"[\n#{String.duplicate(" ", level * 4)}#{encode_array(array_node, level)}\n#{String.duplicate(" ", (level - 1) * 4)}]"
%{type: :number, subtype: :integer, value: integer} ->
Integer.to_string(integer)
%{type: :number, subtype: :float, value: float} ->
Float.to_string(float)
%{type: :string, subtype: _, value: string} ->
# Escape characters that are not valid raw inside a JSON string. Backslash
# and double-quote must be escaped first; \v has no JSON escape, so it is
# emitted as \u000b.
string =
string
|> String.replace("\\", "\\\\")
|> String.replace("\"", "\\\"")
|> String.replace("\n", "\\n")
|> String.replace("\t", "\\t")
|> String.replace("\r", "\\r")
|> String.replace("\v", "\\u000b")
|> String.replace("\b", "\\b")
|> String.replace("\f", "\\f")
"\"#{string}\""
%{type: :boolean, value: boolean} ->
boolean |> to_string()
%{type: nil} ->
"null"
end
end
defp encode_object(node, level) when is_map(node) do
node
|> Enum.sort(fn {_, v1}, {_, v2} -> sort_values(v1, v2) end)
|> Enum.map(fn {k, v} -> "\"#{k}\": #{encode_value(v, level + 1)}" end)
|> Enum.join(",\n#{String.duplicate(" ", level * 4)}")
end
defp encode_array(node, level) when is_list(node) do
node
|> Enum.sort(&sort_values/2)
|> Enum.map(fn v -> "#{encode_value(v, level + 1)}" end)
|> Enum.join(",\n#{String.duplicate(" ", level * 4)}")
end
# Order nodes by their (line, column) position in the source document.
defp sort_values(%{place: {v1_line, v1_column}}, %{place: {v2_line, v2_column}}) do
{v1_line, v1_column} <= {v2_line, v2_column}
end
end
| lib/transcoder.ex | 0.636466 | 0.431105 | transcoder.ex | starcoder |
defmodule TeamCityExUnitFormatting do
# Constants
@root_parent_node_id 0
# Struct
defstruct failures_counter: 0,
invalids_counter: 0,
seed: nil,
skipped_counter: 0,
tests_counter: 0,
trace: false,
width: 80
# Functions
def new(opts) do
%__MODULE__{
seed: opts[:seed],
trace: opts[:trace]
}
end
# ExUnit.TestCase is deprecated in 1.6.0 and replaced by ExUnit.TestModule. ExUnit.TestCase will be removed in 2.0.0.
def put_event(state = %__MODULE__{}, {:case_finished, %ExUnit.TestCase{}}), do: state
def put_event(state = %__MODULE__{}, {:case_started, %ExUnit.TestCase{}}), do: state
def put_event(state = %__MODULE__{}, {:module_finished, test_module = %ExUnit.TestModule{}}) do
put_formatted(:test_suite_finished, attributes(test_module))
state
end
def put_event(state = %__MODULE__{}, {:module_started, test_module = %ExUnit.TestModule{}}) do
put_formatted(:test_suite_started, attributes(test_module))
state
end
def put_event(state = %__MODULE__{}, {:suite_finished, _run_us, _load_us}), do: state
def put_event(state = %__MODULE__{}, {:suite_started, opts}) do
seed = opts[:seed]
IO.puts("Suite started with seed #{seed}")
%__MODULE__{state | seed: seed, trace: opts[:trace]}
end
def put_event(
state = %__MODULE__{
failures_counter: failures_counter,
tests_counter: tests_counter,
width: width
},
{
:test_finished,
test = %ExUnit.Test{
logs: logs,
state:
failed = {
:failed,
{_, _reason, _}
},
time: time
}
}
) do
updated_failures_counter = failures_counter + 1
attributes = attributes(test)
formatted_failure =
ExUnit.Formatter.format_test_failure(
test,
failed,
updated_failures_counter,
width,
&formatter/2
)
details = IO.iodata_to_binary([formatted_failure, format_logs(logs)])
put_formatted(
:test_failed,
Keyword.merge(
attributes,
details: details,
message: ""
)
)
put_formatted(
:test_finished,
Keyword.merge(
attributes,
duration: div(time, 1000)
)
)
%{
state
| tests_counter: tests_counter + 1,
failures_counter: updated_failures_counter
}
end
def put_event(
state = %__MODULE__{
failures_counter: failures_counter,
width: width,
tests_counter: tests_counter
},
{:test_finished, test = %ExUnit.Test{logs: logs, state: {:failed, failed}, time: time}}
)
when is_list(failed) do
updated_failures_counter = failures_counter + 1
attributes = attributes(test)
formatted_failure =
ExUnit.Formatter.format_test_failure(
test,
failed,
updated_failures_counter,
width,
&formatter/2
)
details = IO.iodata_to_binary([formatted_failure, format_logs(logs)])
put_formatted(
:test_failed,
Keyword.merge(
attributes,
details: details,
message: ""
)
)
put_formatted(
:test_finished,
Keyword.merge(
attributes,
duration: div(time, 1000)
)
)
%{
state
| tests_counter: tests_counter + 1,
failures_counter: updated_failures_counter
}
end
def put_event(
state = %__MODULE__{
tests_counter: tests_counter,
skipped_counter: skipped_counter
},
{:test_finished, test = %ExUnit.Test{state: {ignored, _}}}
) when ignored in ~w(excluded skip skipped)a do
attributes = attributes(test)
put_formatted(:test_ignored, attributes)
put_formatted(:test_finished, attributes)
%{
state
| tests_counter: tests_counter + 1,
skipped_counter: skipped_counter + 1
}
end
def put_event(
state = %__MODULE__{},
{
:test_finished,
test = %ExUnit.Test{
time: time
}
}
) do
put_formatted(
:test_finished,
test
|> attributes()
|> Keyword.merge(duration: div(time, 1000))
)
state
end
def put_event(state = %__MODULE__{}, {:test_started, test = %ExUnit.Test{tags: tags}}) do
put_formatted(
:test_started,
test
|> attributes()
|> Keyword.merge(locationHint: "file://#{tags[:file]}:#{tags[:line]}")
)
state
end
def put_event(state = %__MODULE__{}, event) do
IO.warn(
"#{inspect(__MODULE__)} does not know how to process event (#{inspect(event)}). " <>
"Please report this message to https://github.com/KronicDeth/intellij-elixir/issues/new."
)
state
end
## Private Functions
defp attributes(test_or_test_module) do
[
nodeId: nodeId(test_or_test_module),
name: name(test_or_test_module),
parentNodeId: parentNodeId(test_or_test_module)
]
end
defp camelize(s) do
[head | tail] = String.split(s, "_")
"#{head}#{Enum.map(tail, &String.capitalize/1)}"
end
defp colorize(escape, string) do
[escape, string, :reset]
|> IO.ANSI.format_fragment(true)
|> IO.iodata_to_binary()
end
# Must escape certain characters
# see: https://confluence.jetbrains.com/display/TCD9/Build+Script+Interaction+with+TeamCity
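# e.g. escape_output("a|b'c\n") => "a||b|'c|n"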
defp escape_output(s) when not is_binary(s), do: escape_output("#{s}")
defp escape_output(s) do
s
|> String.replace("|", "||")
|> String.replace("'", "|'")
|> String.replace("\n", "|n")
|> String.replace("\r", "|r")
|> String.replace("[", "|[")
|> String.replace("]", "|]")
end
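# Renders a TeamCity service message, e.g.
# format(:test_started, name: "x") => "##teamcity[testStarted name='x']"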
defp format(type, attributes) do
messageName =
type
|> Atom.to_string()
|> camelize()
attrs =
attributes
|> Enum.map(&format_attribute/1)
|> Enum.join(" ")
"##teamcity[#{messageName} #{attrs}]"
end
defp format_attribute({k, v}) do
"#{Atom.to_string(k)}='#{escape_output(v)}'"
end
defp format_logs(""), do: ""
defp format_logs(logs) do
indent = "\n "
indented_logs = String.replace(logs, "\n", indent)
[indent, "The following output was logged:", indent | indented_logs]
end
defp format_module_name(module_name) do
module_name
|> to_string()
|> String.replace(~r/\bElixir\./, "")
end
defp formatter(:diff_enabled?, _), do: true
defp formatter(:error_info, msg), do: colorize(:red, msg)
defp formatter(:extra_info, msg), do: colorize(:cyan, msg)
defp formatter(:location_info, msg), do: colorize([:bright, :black], msg)
defp formatter(:diff_delete, msg), do: colorize(:red, msg)
defp formatter(:diff_delete_whitespace, msg), do: colorize(IO.ANSI.color_background(2, 0, 0), msg)
defp formatter(:diff_insert, msg), do: colorize(:green, msg)
defp formatter(:diff_insert_whitespace, msg), do: colorize(IO.ANSI.color_background(0, 2, 0), msg)
defp formatter(:blame_diff, msg), do: colorize(:red, msg)
defp formatter(_, msg), do: msg
defp name(test = %ExUnit.Test{name: name}) do
named_captures =
Regex.named_captures(
~r|test doc at (?<module>.+)\.(?<function>\w+)/(?<arity>\d+) \((?<count>\d+)\)|,
to_string(name)
)
name(test, named_captures)
end
defp name(%ExUnit.TestModule{name: name}), do: format_module_name(name)
defp name(%ExUnit.Test{name: name}, nil), do: to_string(name)
defp name(
%ExUnit.Test{module: module_name},
%{"arity" => arity, "count" => count, "function" => function, "module" => module}
) do
name = "#{function}/#{arity} doc (#{count})"
if module <> "Test" == format_module_name(module_name) do
name
else
"#{module}.#{name}"
end
end
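# e.g. a doctest named `test doc at MyApp.fun/1 (1)` running in `MyAppTest`
# is rendered as "fun/1 doc (1)"; in any other test module the defining
# module is kept as a prefix: "MyApp.fun/1 doc (1)".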
defp nodeId(%ExUnit.Test{module: module_name, name: name}), do: "#{module_name}.#{name}"
defp nodeId(%ExUnit.TestModule{name: name}), do: name
defp parentNodeId(%ExUnit.Test{module: module_name}), do: module_name
defp parentNodeId(%ExUnit.TestModule{}), do: @root_parent_node_id
# DO NOT use `flowId` as an attribute. IDEA ignores flowId and so it can't be used to interleave async test output
defp put_formatted(type, attributes) do
type
|> format(attributes)
|> IO.puts()
end
end
| resources/exunit/1.6.0/team_city_ex_unit_formatting.ex | 0.642993 | 0.589657 | team_city_ex_unit_formatting.ex | starcoder |
defmodule Horde.DynamicSupervisor do
@moduledoc """
A distributed supervisor.
Horde.DynamicSupervisor implements a distributed DynamicSupervisor backed by an add-wins last-write-wins δ-CRDT (provided by `DeltaCrdt.AWLWWMap`). This CRDT is used for both tracking membership of the cluster and tracking supervised processes.
Using CRDTs guarantees that the distributed, shared state will eventually converge. It also means that Horde.DynamicSupervisor is eventually-consistent, and is optimized for availability and partition tolerance. This can result in temporary inconsistencies under certain conditions (when cluster membership is changing, for example).
Cluster membership is managed with `Horde.Cluster`. Joining a cluster can be done with `Horde.Cluster.set_members/2`. To take a node out of the cluster, call `Horde.Cluster.set_members/2` without that node in the list. Alternatively, setting the `members` startup option to `:auto` will make Horde auto-manage cluster membership so that all (and only) visible nodes are members of the cluster.
Each Horde.DynamicSupervisor node wraps its own local instance of `DynamicSupervisor`. `Horde.DynamicSupervisor.start_child/2` (for example) delegates to the local instance of DynamicSupervisor to actually start and monitor the child. The child spec is also written into the processes CRDT, along with a reference to the node on which it is running. When there is an update to the processes CRDT, Horde makes a comparison and corrects any inconsistencies (for example, if a conflict has been resolved and there is a process that no longer should be running on its node, it will kill that process and remove it from the local supervisor). So while most functions map 1:1 to the equivalent DynamicSupervisor functions, the eventually consistent nature of Horde requires extra behaviour not present in DynamicSupervisor.
## Divergence from standard DynamicSupervisor behaviour
While Horde wraps DynamicSupervisor, it does keep track of processes by the `id` in the child specification. This is a divergence from the behaviour of DynamicSupervisor, which ignores ids altogether. Using DynamicSupervisor is useful for its shutdown behaviour (it shuts down all child processes simultaneously, unlike `Supervisor`).
## Graceful shutdown
When a node is stopped (either manually or by calling `:init.stop`), Horde restarts the child processes of the stopped node on another node. The state of child processes is not preserved, they are simply restarted.
To implement graceful shutdown of worker processes, a few extra steps are necessary.
1. Trap exits. Running `Process.flag(:trap_exit, true)` in the `init/1` callback of any `worker` processes will convert exit signals to messages and allow running `terminate/2` callbacks. It is also important to include the `shutdown` option in your child spec (the default is 5000ms).
2. Use `:init.stop()` to shut down your node. How you accomplish this is up to you, but by simply calling `:init.stop()` somewhere, graceful shutdown will be triggered.
## Module-based Supervisor
Horde supports module-based supervisors to enable dynamic runtime configuration.
```elixir
defmodule MySupervisor do
use Horde.DynamicSupervisor
def start_link(init_arg, options \\ []) do
Horde.DynamicSupervisor.start_link(__MODULE__, init_arg, options)
end
def init(init_arg) do
[strategy: :one_for_one, members: members()]
|> Keyword.merge(init_arg)
|> Horde.DynamicSupervisor.init()
end
defp members() do
[]
end
end
```
Then you can use `MySupervisor.child_spec/1` and `MySupervisor.start_link/1` in the same way as you'd use `Horde.DynamicSupervisor.child_spec/1` and `Horde.DynamicSupervisor.start_link/1`.
"""
use Supervisor
@type options() :: [option()]
@type option ::
{:name, name :: atom()}
| {:strategy, Supervisor.strategy()}
| {:max_restarts, integer()}
| {:max_seconds, integer()}
| {:extra_arguments, [term()]}
| {:distribution_strategy, Horde.DistributionStrategy.t()}
| {:shutdown, integer()}
| {:members, [Horde.Cluster.member()] | :auto}
| {:delta_crdt_options, [DeltaCrdt.crdt_option()]}
| {:process_redistribution, :active | :passive}
@callback init(options()) :: {:ok, options()} | :ignore
@callback child_spec(options :: options()) :: Supervisor.child_spec()
defmacro __using__(options) do
quote location: :keep, bind_quoted: [options: options] do
@behaviour Horde.DynamicSupervisor
if Module.get_attribute(__MODULE__, :doc) == nil do
@doc """
Returns a specification to start this module under a supervisor.
See `Supervisor`.
"""
end
@impl true
def child_spec(arg) do
default = %{
id: __MODULE__,
start: {__MODULE__, :start_link, [arg]},
type: :supervisor
}
Supervisor.child_spec(default, unquote(Macro.escape(options)))
end
defoverridable child_spec: 1
end
end
@doc """
See `start_link/2` for options.
"""
def child_spec(options) when is_list(options) do
id = Keyword.get(options, :name, Horde.DynamicSupervisor)
%{
id: id,
start: {Horde.DynamicSupervisor, :start_link, [options]},
type: :supervisor
}
end
@doc """
Works like `DynamicSupervisor.start_link/1`. Extra options are documented here:
- `:distribution_strategy`, defaults to `Horde.UniformDistribution` but can also be set to `Horde.UniformQuorumDistribution`. `Horde.UniformQuorumDistribution` enforces a quorum and will shut down all processes on a node if it is split from the rest of the cluster.
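
A minimal sketch (`MyApp.DistributedSupervisor` is a hypothetical name):

```elixir
Horde.DynamicSupervisor.start_link(
  name: MyApp.DistributedSupervisor,
  strategy: :one_for_one,
  members: :auto
)
```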
"""
def start_link(options) when is_list(options) do
keys = [
:extra_arguments,
:max_children,
:max_seconds,
:max_restarts,
:strategy,
:distribution_strategy,
:process_redistribution,
:members,
:delta_crdt_options
]
{sup_options, start_options} = Keyword.split(options, keys)
start_link(Supervisor.Default, init(sup_options), start_options)
end
def start_link(mod, init_arg, opts \\ []) do
name = :"#{opts[:name]}.Supervisor"
start_options = Keyword.put(opts, :name, name)
Supervisor.start_link(__MODULE__, {mod, init_arg, opts[:name]}, start_options)
end
@doc """
Works like `DynamicSupervisor.init/1`.
"""
def init(options) when is_list(options) do
unless strategy = options[:strategy] do
raise ArgumentError, "expected :strategy option to be given"
end
intensity = Keyword.get(options, :max_restarts, 3)
period = Keyword.get(options, :max_seconds, 5)
max_children = Keyword.get(options, :max_children, :infinity)
extra_arguments = Keyword.get(options, :extra_arguments, [])
members = Keyword.get(options, :members, [])
delta_crdt_options = Keyword.get(options, :delta_crdt_options, [])
process_redistribution = Keyword.get(options, :process_redistribution, :passive)
distribution_strategy =
Keyword.get(
options,
:distribution_strategy,
Horde.UniformDistribution
)
flags = %{
strategy: strategy,
intensity: intensity,
period: period,
max_children: max_children,
extra_arguments: extra_arguments,
distribution_strategy: distribution_strategy,
members: members,
delta_crdt_options: delta_crdt_options(delta_crdt_options),
process_redistribution: process_redistribution
}
{:ok, flags}
end
def init({mod, init_arg, name}) do
case mod.init(init_arg) do
{:ok, flags} when is_map(flags) ->
[
{DeltaCrdt,
[
sync_interval: flags.delta_crdt_options.sync_interval,
max_sync_size: flags.delta_crdt_options.max_sync_size,
shutdown: flags.delta_crdt_options.shutdown,
crdt: DeltaCrdt.AWLWWMap,
on_diffs: {Horde.DynamicSupervisorImpl, :on_diffs, [name]},
name: crdt_name(name)
]},
{Horde.ProcessesSupervisor,
[
shutdown: :infinity,
root_name: name,
type: :supervisor,
name: supervisor_name(name),
strategy: flags.strategy
]},
{Horde.DynamicSupervisorImpl,
[
name: name,
root_name: name,
init_module: mod,
strategy: flags.strategy,
intensity: flags.intensity,
period: flags.period,
max_children: flags.max_children,
extra_arguments: flags.extra_arguments,
distribution_strategy: flags.distribution_strategy,
process_redistribution: flags.process_redistribution,
members: members(flags.members, name)
]},
{Horde.SignalShutdown,
[
signal_to: [name]
]},
{Horde.DynamicSupervisorTelemetryPoller, name}
]
|> maybe_add_node_manager(flags.members, name)
|> Supervisor.init(strategy: :one_for_all)
:ignore ->
:ignore
other ->
{:stop, {:bad_return, {mod, :init, other}}}
end
end
@doc """
Works like `DynamicSupervisor.stop/3`.
"""
def stop(supervisor, reason \\ :normal, timeout \\ :infinity),
do: Supervisor.stop(:"#{supervisor}.Supervisor", reason, timeout)
@doc """
Works like `DynamicSupervisor.start_child/2`.
"""
def start_child(supervisor, child_spec) do
child_spec = Supervisor.child_spec(child_spec, [])
call(supervisor, {:start_child, child_spec})
end
@doc """
Terminate a child process.
Works like `DynamicSupervisor.terminate_child/2`.
"""
@spec terminate_child(Supervisor.supervisor(), child_pid :: pid()) ::
:ok | {:error, :not_found} | {:error, {:node_dead_or_shutting_down, String.t()}}
def terminate_child(supervisor, child_pid) when is_pid(child_pid),
do: call(supervisor, {:terminate_child, child_pid})
@doc """
Works like `DynamicSupervisor.which_children/1`.
This function delegates to all supervisors in the cluster and returns the aggregated output. Where memory warnings apply to `DynamicSupervisor.which_children`, these count double for `Horde.DynamicSupervisor.which_children`.
"""
def which_children(supervisor), do: call(supervisor, :which_children)
@doc """
Works like `DynamicSupervisor.count_children/1`.
This function delegates to all supervisors in the cluster and returns the aggregated output.
"""
def count_children(supervisor), do: call(supervisor, :count_children)
@doc """
Waits for Horde.DynamicSupervisor to have quorum.
"""
@spec wait_for_quorum(horde :: GenServer.server(), timeout :: timeout()) :: :ok
def wait_for_quorum(horde, timeout) do
GenServer.call(horde, :wait_for_quorum, timeout)
end
defp call(supervisor, msg), do: GenServer.call(supervisor, msg, :infinity)
defp maybe_add_node_manager(children, :auto, name),
do: [{Horde.NodeListener, name} | children]
defp maybe_add_node_manager(children, _, _), do: children
defp delta_crdt_options(options) do
%{
sync_interval: Keyword.get(options, :sync_interval, 300),
max_sync_size: Keyword.get(options, :max_sync_size, :infinite),
shutdown: Keyword.get(options, :shutdown, 30_000)
}
end
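# Ensures this supervisor's own name is always part of the member list;
# with :auto, membership is managed by Horde.NodeListener instead.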
def members(:auto, _name), do: :auto
def members(options, name) do
if name in options do
options
else
[name | options]
end
end
defp supervisor_name(name), do: :"#{name}.ProcessesSupervisor"
defp crdt_name(name), do: :"#{name}.Crdt"
end
| lib/horde/dynamic_supervisor.ex | 0.939004 | 0.802826 | dynamic_supervisor.ex | starcoder |
defmodule ExAlgo.List.LinkedList do
@moduledoc """
Implementation of a singly linked list.
"""
@type neg_index_error :: {:error, :negative_index}
@type empty_error :: {:error, :empty_list}
@type value_type :: any()
@type t :: %__MODULE__{container: [value_type()]}
defstruct container: []
@doc """
Creates an empty linked list.
## Example
iex> LinkedList.new
%LinkedList{container: []}
"""
@spec new :: t()
def new, do: %__MODULE__{container: []}
@doc """
Creates a linked list from an enumerable.
## Example
iex> LinkedList.from 1..3
%LinkedList{container: [1, 2, 3]}
"""
@spec from(Enumerable.t()) :: t()
def from(enumerable), do: %__MODULE__{container: Enum.to_list(enumerable)}
@doc """
Inserts a new element on the head of the list.
## Example
iex> list = LinkedList.from 1..3
iex> list |> LinkedList.insert(10)
%LinkedList{container: [10, 1, 2, 3]}
"""
@spec insert(t(), value_type()) :: t()
def insert(%__MODULE__{container: container}, element),
do: %__MODULE__{container: [element | container]}
@doc """
Removes the head.
## Example
iex> list = LinkedList.from 1..3
iex> list |> LinkedList.remove()
{1, %LinkedList{container: [2, 3]}}
iex> LinkedList.new() |> LinkedList.remove()
{:error, :empty_list}
"""
@spec remove(t()) :: {value_type(), t()} | empty_error()
def remove(%__MODULE__{container: []}), do: {:error, :empty_list}
def remove(%__MODULE__{container: [head | rest]}), do: {head, %__MODULE__{container: rest}}
@doc """
Returns the head of the linked list
## Example
iex> LinkedList.from(1..10) |> LinkedList.head()
1
iex> LinkedList.new |> LinkedList.head()
{:error, :empty_list}
"""
@spec head(t()) :: value_type() | empty_error()
def head(%__MODULE__{container: [head | _]}), do: head
def head(_), do: {:error, :empty_list}
@doc """
Returns the next items of the linked list
## Example
iex> LinkedList.from(1..3) |> LinkedList.next()
%LinkedList{container: [2, 3]}
iex> LinkedList.new |> LinkedList.next()
{:error, :empty_list}
"""
@spec next(t()) :: t() | empty_error()
def next(%__MODULE__{container: [_ | next]}), do: %__MODULE__{container: next}
def next(_), do: {:error, :empty_list}
@doc """
Return the element at index. Index is 0-based and must be non-negative. Errors on an empty list.
## Example
iex> LinkedList.from(0..10) |> LinkedList.at(3)
3
"""
@spec at(t(), integer()) :: value_type() | empty_error() | neg_index_error()
def at(%__MODULE__{container: []}, _), do: {:error, :empty_list}
def at(_, index) when index < 0, do: {:error, :negative_index}
def at(list, 0), do: list |> head()
def at(list, index), do: list |> next() |> at(index - 1)
end
| lib/ex_algo/list/linked_list.ex | 0.85443 | 0.436742 | linked_list.ex | starcoder |
defmodule Rihanna do
@moduledoc """
The primary client interface for Rihanna.
There are two ways to dispatch jobs with Rihanna:
1. Using mod-fun-args which is a bit like RPC
2. Using a module that implements `Rihanna.Job` and passing in one argument
See the documentation for `enqueue/1` and `enqueue/2` for more details.
## Supervisor
You must have started `Rihanna.Supervisor` otherwise you will see an error trying
to enqueue or retry jobs.
## Database Connections
Rihanna requires 1 + N database connections per node, where 1 connection is used
for the external API of enqueuing/retrying jobs and N is the number of
dispatchers. The default configuration is to run one dispatcher per node, so
this will use 2 database connections.
## Notes on queueing
Rihanna uses a FIFO job queue, so jobs will be processed roughly in the order
that they are enqueued. However, because Rihanna is a concurrent job queue,
it may have multiple workers processing jobs at the same time so there is no
guarantee of any ordering in practice.
## Scheduling
You can schedule jobs for deferred execution using `schedule/2` and
`schedule/3`. Jobs scheduled for later execution will run shortly after the
due at date, but there is no guarantee on exactly when they will run.
"""
@enqueue_help_message """
Rihanna.enqueue requires either one argument in the form {mod, fun, args} or
two arguments of a module implementing Rihanna.Job and its arg.
For example, to run IO.puts("Hello"):
> Rihanna.enqueue({IO, :puts, ["Hello"]})
Or, if you have a job called MyJob that implements the Rihanna.Job behaviour:
> Rihanna.enqueue(MyJob, arg)
"""
@doc """
Enqueues a job specified as a simple mod-fun-args tuple.
## Example
> Rihanna.enqueue({IO, :puts, ["Umbrella-ella-ella"]})
"""
@spec enqueue({module, atom, list()}) :: {:ok, Rihanna.Job.t()}
def enqueue(term = {mod, fun, args}) when is_atom(mod) and is_atom(fun) and is_list(args) do
Rihanna.Job.enqueue(term)
end
def enqueue(_) do
raise ArgumentError, @enqueue_help_message
end
@doc """
Enqueues a job specified as a module and one argument.
It is expected that the module implements the `Rihanna.Job` behaviour and
defines a function `c:Rihanna.Job.perform/1`.
The argument may be anything.
See `Rihanna.Job` for more on how to implement your own jobs.
You can enqueue a job like so:
```
# Enqueue job for later execution and return immediately
Rihanna.enqueue(MyApp.MyJob, [arg1, arg2])
```
"""
@spec enqueue(module, any) :: {:ok, Rihanna.Job.t()}
def enqueue(mod, arg) when is_atom(mod) do
Rihanna.Job.enqueue({mod, arg})
end
def enqueue(_, _) do
raise ArgumentError, @enqueue_help_message
end
@type schedule_option :: {:at, DateTime.t()} | {:in, pos_integer}
@type schedule_options :: [schedule_option]
@doc """
Schedule a job specified as a simple mod-fun-args tuple to run later.
## Example
Schedule at a `DateTime`:
due_at = ~N[2018-07-01 12:00:00] |> DateTime.from_naive!("Etc/UTC")
Rihanna.schedule({IO, :puts, ["Umbrella-ella-ella"]}, at: due_at)
Schedule in 5 minutes:
Rihanna.schedule({IO, :puts, ["Umbrella-ella-ella"]}, in: :timer.minutes(5))
"""
@spec schedule({module, atom, list()}, schedule_options) :: {:ok, Rihanna.Job.t()}
def schedule(term = {mod, fun, args}, schedule_options)
when is_atom(mod) and is_atom(fun) and is_list(args) do
Rihanna.Job.enqueue(term, due_at(schedule_options))
end
@doc """
Schedule a job specified as a module and one argument to run later.
It is expected that the module implements the `Rihanna.Job` behaviour and
defines a function `c:Rihanna.Job.perform/1`.
The argument may be anything.
See `Rihanna.Job` for more on how to implement your own jobs.
## Example
Schedule at a `DateTime`:
due_at = DateTime.from_naive!(~N[2018-07-01 12:00:00], "Etc/UTC")
Rihanna.schedule(MyApp.MyJob, [arg1, arg2], at: due_at)
Schedule in 5 minutes:
Rihanna.schedule(MyApp.MyJob, [arg1, arg2], in: :timer.minutes(5))
"""
@spec schedule(module, any, schedule_options) :: {:ok, Rihanna.Job.t()}
def schedule(mod, arg, schedule_options) when is_atom(mod) do
Rihanna.Job.enqueue({mod, arg}, due_at(schedule_options))
end
@doc """
Retries a job by ID. ID can be passed as either integer or string.
Note that this only works if the job has failed - if it has not yet run or is
currently in progress, this function will do nothing.
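## Example
> Rihanna.retry("42")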
"""
@spec retry(String.t()) :: {:ok, :retried} | {:error, :job_not_found}
def retry(job_id) when is_binary(job_id) do
job_id
|> String.to_integer()
|> retry()
end
@spec retry(integer) :: {:ok, :retried} | {:error, :job_not_found}
def retry(job_id) when is_integer(job_id) and job_id > 0 do
Rihanna.Job.retry_failed(job_id)
end
@doc """
Deletes a job by ID. ID can be passed as either integer or string.
"""
@spec delete(String.t() | integer) :: {:ok, Rihanna.Job.t()} | {:error, :job_not_found}
def delete(job_id) when is_binary(job_id) do
job_id
|> String.to_integer()
|> delete()
end
def delete(job_id) when is_integer(job_id) and job_id > 0 do
Rihanna.Job.delete(job_id)
end
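# Resolves schedule options to an absolute due date: `at:` is used verbatim,
# while `in:` is interpreted as milliseconds from the current UTC time.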
defp due_at(at: %DateTime{} = due_at), do: due_at
defp due_at(in: due_in) when is_integer(due_in) and due_in > 0 do
now = DateTime.utc_now() |> DateTime.to_unix(:millisecond)
DateTime.from_unix!(now + due_in, :millisecond)
end
end
| lib/rihanna.ex | 0.904467 | 0.879354 | rihanna.ex | starcoder |
defmodule AWS.TimestreamWrite do
@moduledoc """
Amazon Timestream is a fast, scalable, fully managed time series database
service that makes it easy to store and analyze trillions of time series data
points per day.
With Timestream, you can easily store and analyze IoT sensor data to derive
insights from your IoT applications. You can analyze industrial telemetry to
streamline equipment management and maintenance. You can also store and analyze
log data and metrics to improve the performance and availability of your
applications. Timestream is built from the ground up to effectively ingest,
process, and store time series data. It organizes data to optimize query
processing. It automatically scales based on the volume of data ingested and on
the query volume to ensure you receive optimal performance while inserting and
querying data. As your data grows over time, Timestream's adaptive query
processing engine spans across storage tiers to provide fast analysis while
reducing costs.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Timestream Write",
api_version: "2018-11-01",
content_type: "application/x-amz-json-1.0",
credential_scope: nil,
endpoint_prefix: "ingest.timestream",
global?: false,
protocol: "json",
service_id: "Timestream Write",
signature_version: "v4",
signing_name: "timestream",
target_prefix: "Timestream_20181101"
}
end
@doc """
Creates a new Timestream database.
If the KMS key is not specified, the database will be encrypted with a
Timestream managed KMS key located in your account. Refer to [AWS managed KMS keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk)
for more info. Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def create_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateDatabase", input, options)
end
@doc """
The CreateTable operation adds a new table to an existing database in your
account.
In an AWS account, table names must be at least unique within each Region if
they are in the same database. You may have identical table names in the same
Region if the tables are in separate databases. While creating the table, you
must specify the table name, database name, and the retention properties.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def create_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTable", input, options)
end
@doc """
Deletes a given Timestream database.
*This is an irreversible operation. After a database is deleted, the time series
data from its tables cannot be recovered.*
All tables in the database must be deleted first, or a ValidationException error
will be thrown.
Due to the nature of distributed retries, the operation can return either
success or a ResourceNotFoundException. Clients should consider them equivalent.
"""
def delete_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDatabase", input, options)
end
@doc """
Deletes a given Timestream table.
This is an irreversible operation. After a Timestream database table is deleted,
the time series data stored in the table cannot be recovered.
Due to the nature of distributed retries, the operation can return either
success or a ResourceNotFoundException. Clients should consider them equivalent.
"""
def delete_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTable", input, options)
end
@doc """
Returns information about the database, including the database name, time that
the database was created, and the total number of tables found within the
database.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def describe_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDatabase", input, options)
end
@doc """
DescribeEndpoints returns a list of available endpoints to make Timestream API
calls against.
This API is available through both Write and Query.
Because Timestream's SDKs are designed to transparently work with the service's
architecture, including the management and mapping of the service endpoints, *it
is not recommended that you use this API unless*:
* Your application uses a programming language that does not yet
have SDK support
* You require better control over the client-side implementation
For detailed information on how to use DescribeEndpoints, see [The Endpoint Discovery Pattern and REST
APIs](https://docs.aws.amazon.com/timestream/latest/developerguide/Using-API.endpoint-discovery.html).
"""
def describe_endpoints(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEndpoints", input, options)
end
@doc """
Returns information about the table, including the table name, database name,
retention duration of the memory store and the magnetic store.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def describe_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTable", input, options)
end
@doc """
Returns a list of your Timestream databases.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def list_databases(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDatabases", input, options)
end
@doc """
A list of tables, along with the name, status and retention properties of each
table.
"""
def list_tables(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTables", input, options)
end
@doc """
List all tags on a Timestream resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Associate a set of tags with a Timestream resource.
You can then activate these user-defined tags so that they appear on the Billing
and Cost Management console for cost allocation tracking.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes the association of tags from a Timestream resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Modifies the KMS key for an existing database.
While updating the database, you must specify the database name and the
identifier of the new KMS key to be used (`KmsKeyId`). If there are any
concurrent `UpdateDatabase` requests, first writer wins.
"""
def update_database(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateDatabase", input, options)
end
@doc """
Modifies the retention duration of the memory store and magnetic store for your
Timestream table.
Note that the change in retention duration takes effect immediately. For
example, if the retention period of the memory store was initially set to 2
hours and then changed to 24 hours, the memory store will be capable of holding
24 hours of data, but will be populated with 24 hours of data 22 hours after
this change was made. Timestream does not retrieve data from the magnetic store
to populate the memory store.
Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
"""
def update_table(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateTable", input, options)
end
@doc """
The WriteRecords operation enables you to write your time series data into
Timestream.
You can specify a single data point or a batch of data points to be inserted
into the system. Timestream offers a flexible schema that auto-detects
the column names and data types for your Timestream tables based on the
dimension names and data types of the data points you specify when invoking
writes into the database. Timestream supports eventually consistent read
semantics. This means that when you query data immediately after writing a batch
of data into Timestream, the query results might not reflect the results of a
recently completed write operation. The results may also include some stale
data. If you repeat the query request after a short time, the results should
return the latest data. Service quotas apply. For more information, see [Access Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
in the Timestream Developer Guide.
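
A hypothetical call; the record shape follows the public Timestream
`WriteRecords` API, but every name and value here is illustrative:

    AWS.TimestreamWrite.write_records(client, %{
      "DatabaseName" => "sampleDB",
      "TableName" => "sensor_readings",
      "Records" => [
        %{
          "Dimensions" => [%{"Name" => "device_id", "Value" => "device-1"}],
          "MeasureName" => "temperature",
          "MeasureValue" => "21.5",
          "MeasureValueType" => "DOUBLE",
          "Time" => "1609459200000",
          "TimeUnit" => "MILLISECONDS"
        }
      ]
    })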
"""
def write_records(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "WriteRecords", input, options)
end
end
| lib/aws/generated/timestream_write.ex | 0.88275 | 0.633609 | timestream_write.ex | starcoder |
defmodule Scenic.Primitive.Transform do
@moduledoc """
Generic code for the transform styles.
Not intended to be used directly
"""
alias Scenic.Math.Matrix
alias Scenic.Math.Vector2
alias Scenic.Primitive.Transform
@callback info(data :: any) :: bitstring
@callback verify(any) :: boolean
# ===========================================================================
defmodule FormatError do
@moduledoc false
defexception message: nil, module: nil, data: nil
end
@style_name_map %{
:pin => Transform.Pin,
:scale => Transform.Scale,
:rotate => Transform.Rotate,
:translate => Transform.Translate,
:matrix => Transform.Matrix
}
# ===========================================================================
# defmacro __using__([type_code: type_code]) when is_integer(type_code) do
defmacro __using__(_opts) do
quote do
@behaviour Scenic.Primitive.Transform
def verify!(data) do
case verify(data) do
true ->
data
false ->
raise FormatError, message: info(data), module: __MODULE__, data: data
end
end
end
# quote
end
# defmacro
# ===========================================================================
def verify!(tx_key, tx_data) do
case Map.get(@style_name_map, tx_key) do
nil -> raise FormatError, message: "Unknown transform", module: tx_key, data: tx_data
module -> module.verify!(tx_data)
end
end
# ============================================================================
# transform helper functions
# --------------------------------------------------------
def calculate_local(txs)
def calculate_local(nil), do: nil
def calculate_local(txs) when txs == %{}, do: nil
def calculate_local(%{pin: _} = txs) do
# look for case where only the pin is set
case Enum.count(txs) do
1 -> nil
_ -> do_calculate_local(txs)
end
end
def calculate_local(txs), do: do_calculate_local(txs)
defp do_calculate_local(txs) do
# start with identity - which is like multiplying by 1
Matrix.identity()
|> multiply_partial(:matrix, txs[:matrix])
|> multiply_partial(:translate, txs[:translate])
|> rotate_and_scale(txs)
end
# --------------------------------------------------------
defp multiply_partial(mx, type, value)
defp multiply_partial(mx, _, nil), do: mx
defp multiply_partial(mx, :pin, point), do: Matrix.translate(mx, point)
defp multiply_partial(mx, :scale, pct), do: Matrix.scale(mx, pct)
defp multiply_partial(mx, :rotate, rot), do: Matrix.rotate(mx, rot)
defp multiply_partial(mx, :translate, trns), do: Matrix.translate(mx, trns)
defp multiply_partial(mx, :matrix, dev_mx), do: Matrix.mul(mx, dev_mx)
defp multiply_partial(mx, :inv_pin, point) do
Matrix.translate(mx, Vector2.invert(point))
end
# --------------------------------------------------------
defp rotate_and_scale(mx, txs) do
# don't do any work if neither rotate nor scale is set;
# don't need to translate twice for no reason
case txs[:rotate] || txs[:scale] do
nil ->
mx
_ ->
mx
|> multiply_partial(:pin, txs[:pin])
|> multiply_partial(:rotate, txs[:rotate])
|> multiply_partial(:scale, txs[:scale])
|> multiply_partial(:inv_pin, txs[:pin])
end
end
end
# File: lib/scenic/primitive/transform/transform.ex
defmodule WechatPay.Plug.Refund do
@moduledoc """
Plug behaviour to handle **Refund** Notification from Wechat's Payment Gateway.
Official document: https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_16&index=9
## Example
### Define a handler
See `WechatPay.Plug.Handler` for how to implement a handler.
```elixir
defmodule MyApp.WechatHandler do
use WechatPay.Plug.Handler
@impl WechatPay.Plug.Handler
def handle_data(conn, _data) do
:ok
end
end
```
### Plug in
In your app's `lib/my_app_web/router.ex`:
```elixir
post "/wechat_pay/notification/refund", WechatPay.Plug.Refund, [handler: MyApp.WechatHandler, api_key: "my-api-key"]
```
"""
alias WechatPay.Utils.XMLParser
alias WechatPay.Error
import Plug.Conn
@spec init(keyword()) :: [{:api_key, any()} | {:handler, any()}]
def init(opts) do
handler = Keyword.get(opts, :handler)
api_key = Keyword.get(opts, :api_key)
[handler: handler, api_key: api_key]
end
@spec call(Plug.Conn.t(), [{:api_key, any()} | {:handler, any()}]) :: Plug.Conn.t()
def call(conn, handler: handler_module, api_key: api_key) do
{:ok, body, conn} = Plug.Conn.read_body(conn)
with {:ok, data} <- XMLParser.parse(body),
:ok <- process_data(conn, data, handler_module, api_key) do
response_with_success_info(conn)
else
{:error, %Error{reason: reason}} ->
conn
|> send_resp(:unprocessable_entity, reason)
end
end
defp response_with_success_info(conn) do
body = ~s"""
<xml>
<return_code><![CDATA[SUCCESS]]></return_code>
<return_msg><![CDATA[OK]]></return_msg>
</xml>
"""
conn
|> put_resp_content_type("application/xml")
|> send_resp(:ok, body)
end
defp process_data(conn, data, handler_module, api_key) do
with {:ok, data} <- process_return_field(data),
{:ok, decrypted_data} <- decrypt_data(data, api_key),
{:ok, map} <- XMLParser.parse(decrypted_data, "root"),
:ok <- apply(handler_module, :handle_data, [conn, map]) do
:ok
else
{:error, %Error{} = error} ->
maybe_handle_error(handler_module, conn, error, data)
{:error, error}
end
end
defp process_return_field(%{return_code: "SUCCESS"} = data) do
{:ok, data}
end
defp process_return_field(%{return_code: "FAIL", return_msg: reason}) do
{:error, %Error{reason: reason, type: :failed_return}}
end
defp maybe_handle_error(handler_module, conn, error, data) do
handler_module.handle_error(conn, error, data)
end
defp decrypt_data(%{req_info: encrypted_data}, api_key) do
# The AES key is the lowercase hex MD5 digest of the API key
key =
:md5
|> :crypto.hash(api_key)
|> Base.encode16(case: :lower)
case Base.decode64(encrypted_data) do
{:ok, data} ->
try do
# Use the modern crypto API when available (OTP 22+), otherwise
# fall back to the deprecated block_decrypt/3
xml_string =
if Code.ensure_loaded?(:crypto) and function_exported?(:crypto, :crypto_one_time, 4) do
:crypto.crypto_one_time(:aes_256_ecb, key, data, false)
else
:crypto.block_decrypt(:aes_ecb, key, data)
end
{:ok, xml_string}
rescue
ArgumentError ->
{:error, %Error{reason: "Fail to decrypt req_info", type: :fail_to_decrypt_req_info}}
end
:error ->
# `req_info` was not valid Base64; previously this raised an unrescued MatchError
{:error, %Error{reason: "Fail to decrypt req_info", type: :fail_to_decrypt_req_info}}
end
end
defp decrypt_data(_, _api_key) do
{:error,
%Error{reason: "Missing the encrypted `req_info` in response data", type: :missing_req_info}}
end
end
# File: lib/wechat_pay/plug/refund.ex
defmodule StringIO do
@moduledoc """
Controls an IO device process that wraps a string.
A `StringIO` IO device can be passed as a "device" to
most of the functions in the `IO` module.
## Examples
iex> {:ok, pid} = StringIO.open("foo")
iex> IO.read(pid, 2)
"fo"
"""
use GenServer
@doc ~S"""
Creates an IO device.
`string` will be the initial input of the newly created
device.
The device will be created and sent to the function given.
When the function returns, the device will be closed. The final
result will be a tuple with `:ok` and the result of the function.
## Options
* `:capture_prompt` - if set to `true`, prompts (specified as
arguments to `IO.get*` functions) are captured in the output.
Defaults to `false`.
* `:encoding` (since v1.10.0) - encoding of the IO device. Allowed
values are `:unicode` (default) and `:latin1`.
## Examples
iex> StringIO.open("foo", [], fn pid ->
...> input = IO.gets(pid, ">")
...> IO.write(pid, "The input was #{input}")
...> StringIO.contents(pid)
...> end)
{:ok, {"", "The input was foo"}}
iex> StringIO.open("foo", [capture_prompt: true], fn pid ->
...> input = IO.gets(pid, ">")
...> IO.write(pid, "The input was #{input}")
...> StringIO.contents(pid)
...> end)
{:ok, {"", ">The input was foo"}}
"""
@doc since: "1.7.0"
@spec open(binary, keyword, (pid -> res)) :: {:ok, res} when res: var
def open(string, options, function)
when is_binary(string) and is_list(options) and is_function(function, 1) do
{:ok, pid} = GenServer.start_link(__MODULE__, {string, options}, [])
try do
{:ok, function.(pid)}
after
{:ok, {_input, _output}} = close(pid)
end
end
@doc ~S"""
Creates an IO device.
`string` will be the initial input of the newly created
device.
`options_or_function` can be a keyword list of options or
a function.
If options are provided, the result will be `{:ok, pid}`, returning the
IO device created. The option `:capture_prompt`, when set to `true`, causes
prompts (which are specified as arguments to `IO.get*` functions) to be
included in the device's output.
If a function is provided, the device will be created and sent to the
function. When the function returns, the device will be closed. The final
result will be a tuple with `:ok` and the result of the function.
## Examples
iex> {:ok, pid} = StringIO.open("foo")
iex> IO.gets(pid, ">")
"foo"
iex> StringIO.contents(pid)
{"", ""}
iex> {:ok, pid} = StringIO.open("foo", capture_prompt: true)
iex> IO.gets(pid, ">")
"foo"
iex> StringIO.contents(pid)
{"", ">"}
iex> StringIO.open("foo", fn pid ->
...> input = IO.gets(pid, ">")
...> IO.write(pid, "The input was #{input}")
...> StringIO.contents(pid)
...> end)
{:ok, {"", "The input was foo"}}
"""
@spec open(binary, keyword) :: {:ok, pid}
@spec open(binary, (pid -> res)) :: {:ok, res} when res: var
def open(string, options_or_function \\ [])
def open(string, options_or_function) when is_binary(string) and is_list(options_or_function) do
GenServer.start_link(__MODULE__, {string, options_or_function}, [])
end
def open(string, options_or_function)
when is_binary(string) and is_function(options_or_function, 1) do
open(string, [], options_or_function)
end
@doc """
Returns the current input/output buffers for the given IO
device.
## Examples
iex> {:ok, pid} = StringIO.open("in")
iex> IO.write(pid, "out")
iex> StringIO.contents(pid)
{"in", "out"}
"""
@spec contents(pid) :: {binary, binary}
def contents(pid) when is_pid(pid) do
GenServer.call(pid, :contents)
end
@doc """
Flushes the output buffer and returns its current contents.
## Examples
iex> {:ok, pid} = StringIO.open("in")
iex> IO.write(pid, "out")
iex> StringIO.flush(pid)
"out"
iex> StringIO.contents(pid)
{"in", ""}
"""
@spec flush(pid) :: binary
def flush(pid) when is_pid(pid) do
GenServer.call(pid, :flush)
end
@doc """
Stops the IO device and returns the remaining input/output
buffers.
## Examples
iex> {:ok, pid} = StringIO.open("in")
iex> IO.write(pid, "out")
iex> StringIO.close(pid)
{:ok, {"in", "out"}}
"""
@spec close(pid) :: {:ok, {binary, binary}}
def close(pid) when is_pid(pid) do
GenServer.call(pid, :close)
end
## callbacks
@impl true
def init({string, options}) do
capture_prompt = options[:capture_prompt] || false
encoding = options[:encoding] || :unicode
{:ok, %{encoding: encoding, input: string, output: "", capture_prompt: capture_prompt}}
end
@impl true
def handle_info({:io_request, from, reply_as, req}, state) do
state = io_request(from, reply_as, req, state)
{:noreply, state}
end
def handle_info(_message, state) do
{:noreply, state}
end
@impl true
def handle_call(:contents, _from, %{input: input, output: output} = state) do
{:reply, {input, output}, state}
end
def handle_call(:flush, _from, %{output: output} = state) do
{:reply, output, %{state | output: ""}}
end
def handle_call(:close, _from, %{input: input, output: output} = state) do
{:stop, :normal, {:ok, {input, output}}, state}
end
defp io_request(from, reply_as, req, state) do
{reply, state} = io_request(req, state)
io_reply(from, reply_as, reply)
state
end
defp io_request({:put_chars, chars} = req, state) do
put_chars(:latin1, chars, req, state)
end
defp io_request({:put_chars, mod, fun, args} = req, state) do
put_chars(:latin1, apply(mod, fun, args), req, state)
end
defp io_request({:put_chars, encoding, chars} = req, state) do
put_chars(encoding, chars, req, state)
end
defp io_request({:put_chars, encoding, mod, fun, args} = req, state) do
put_chars(encoding, apply(mod, fun, args), req, state)
end
defp io_request({:get_chars, prompt, count}, state) when count >= 0 do
io_request({:get_chars, :latin1, prompt, count}, state)
end
defp io_request({:get_chars, encoding, prompt, count}, state) when count >= 0 do
get_chars(encoding, prompt, count, state)
end
defp io_request({:get_line, prompt}, state) do
io_request({:get_line, :latin1, prompt}, state)
end
defp io_request({:get_line, encoding, prompt}, state) do
get_line(encoding, prompt, state)
end
defp io_request({:get_until, prompt, mod, fun, args}, state) do
io_request({:get_until, :latin1, prompt, mod, fun, args}, state)
end
defp io_request({:get_until, encoding, prompt, mod, fun, args}, state) do
get_until(encoding, prompt, mod, fun, args, state)
end
defp io_request({:get_password, encoding}, state) do
get_line(encoding, "", state)
end
defp io_request({:setopts, [encoding: encoding]}, state) when encoding in [:latin1, :unicode] do
{:ok, %{state | encoding: encoding}}
end
defp io_request({:setopts, _opts}, state) do
{{:error, :enotsup}, state}
end
defp io_request(:getopts, state) do
{[binary: true, encoding: state.encoding], state}
end
defp io_request({:get_geometry, :columns}, state) do
{{:error, :enotsup}, state}
end
defp io_request({:get_geometry, :rows}, state) do
{{:error, :enotsup}, state}
end
defp io_request({:requests, reqs}, state) do
io_requests(reqs, {:ok, state})
end
defp io_request(_, state) do
{{:error, :request}, state}
end
## put_chars
defp put_chars(encoding, chars, req, state) do
case :unicode.characters_to_binary(chars, encoding, state.encoding) do
string when is_binary(string) ->
{:ok, %{state | output: state.output <> string}}
{_, _, _} ->
{{:error, req}, state}
end
rescue
ArgumentError -> {{:error, req}, state}
end
## get_chars
defp get_chars(encoding, prompt, count, %{input: input} = state) do
case get_chars(input, encoding, count) do
{:error, _} = error ->
{error, state}
{result, input} ->
{result, state_after_read(state, input, prompt, 1)}
end
end
defp get_chars("", _encoding, _count) do
{:eof, ""}
end
defp get_chars(input, :latin1, count) when byte_size(input) < count do
{input, ""}
end
defp get_chars(input, :latin1, count) do
<<chars::binary-size(count), rest::binary>> = input
{chars, rest}
end
defp get_chars(input, :unicode, count) do
with {:ok, count} <- split_at(input, count, 0) do
<<chars::binary-size(count), rest::binary>> = input
{chars, rest}
end
end
defp split_at(_, 0, acc),
do: {:ok, acc}
defp split_at(<<h::utf8, t::binary>>, count, acc),
do: split_at(t, count - 1, acc + byte_size(<<h::utf8>>))
defp split_at(<<_, _::binary>>, _count, _acc),
do: {:error, :invalid_unicode}
defp split_at(<<>>, _count, acc),
do: {:ok, acc}
## get_line
defp get_line(encoding, prompt, %{input: input} = state) do
case bytes_until_eol(input, encoding, 0) do
{:split, 0} ->
{:eof, state_after_read(state, "", prompt, 1)}
{:split, count} ->
{result, remainder} = :erlang.split_binary(input, count)
{result, state_after_read(state, remainder, prompt, 1)}
{:replace_split, count} ->
{result, remainder} = :erlang.split_binary(input, count)
result = binary_part(result, 0, byte_size(result) - 2) <> "\n"
{result, state_after_read(state, remainder, prompt, 1)}
:error ->
{{:error, :collect_line}, state}
end
end
## get_until
defp get_until(encoding, prompt, mod, fun, args, %{input: input} = state) do
case get_until(input, encoding, mod, fun, args, [], 0) do
{result, input, count} ->
input =
case input do
:eof -> ""
_ -> list_to_binary(input, encoding)
end
{get_until_result(result, encoding), state_after_read(state, input, prompt, count)}
:error ->
{:error, state}
end
end
defp get_until("", encoding, mod, fun, args, continuation, count) do
case apply(mod, fun, [continuation, :eof | args]) do
{:done, result, rest} ->
{result, rest, count + 1}
{:more, next_continuation} ->
get_until("", encoding, mod, fun, args, next_continuation, count + 1)
end
end
defp get_until(chars, encoding, mod, fun, args, continuation, count) do
case bytes_until_eol(chars, encoding, 0) do
{kind, size} when kind in [:split, :replace_split] ->
{line, rest} = :erlang.split_binary(chars, size)
case apply(mod, fun, [continuation, binary_to_list(line, encoding) | args]) do
{:done, result, :eof} ->
{result, rest, count + 1}
{:done, result, extra} ->
{result, extra ++ binary_to_list(rest, encoding), count + 1}
{:more, next_continuation} ->
get_until(rest, encoding, mod, fun, args, next_continuation, count + 1)
end
:error ->
:error
end
end
defp binary_to_list(data, _) when is_list(data), do: data
defp binary_to_list(data, :unicode) when is_binary(data), do: String.to_charlist(data)
defp binary_to_list(data, :latin1) when is_binary(data), do: :erlang.binary_to_list(data)
defp list_to_binary(data, _) when is_binary(data), do: data
defp list_to_binary(data, :unicode) when is_list(data), do: List.to_string(data)
defp list_to_binary(data, :latin1) when is_list(data), do: :erlang.list_to_binary(data)
# From https://erlang.org/doc/apps/stdlib/io_protocol.html: result can be any
# Erlang term, but if it is a list(), the I/O server can convert it to a binary().
defp get_until_result(data, encoding) when is_list(data), do: list_to_binary(data, encoding)
defp get_until_result(data, _), do: data
## io_requests
defp io_requests([req | rest], {:ok, state}) do
io_requests(rest, io_request(req, state))
end
defp io_requests(_, result) do
result
end
## helpers
defp state_after_read(%{capture_prompt: false} = state, remainder, _prompt, _count) do
%{state | input: remainder}
end
defp state_after_read(%{capture_prompt: true, output: output} = state, remainder, prompt, count) do
output = <<output::binary, :binary.copy(IO.chardata_to_string(prompt), count)::binary>>
%{state | input: remainder, output: output}
end
defp bytes_until_eol("", _, count), do: {:split, count}
defp bytes_until_eol(<<"\r\n"::binary, _::binary>>, _, count), do: {:replace_split, count + 2}
defp bytes_until_eol(<<"\n"::binary, _::binary>>, _, count), do: {:split, count + 1}
defp bytes_until_eol(<<head::utf8, tail::binary>>, :unicode, count) do
bytes_until_eol(tail, :unicode, count + byte_size(<<head::utf8>>))
end
defp bytes_until_eol(<<_, tail::binary>>, :latin1, count) do
bytes_until_eol(tail, :latin1, count + 1)
end
defp bytes_until_eol(<<_::binary>>, _, _), do: :error
defp io_reply(from, reply_as, reply) do
send(from, {:io_reply, reply_as, reply})
end
end
# File: lib/elixir/lib/string_io.ex
defmodule RDF.XSD.Integer do
@moduledoc """
`RDF.XSD.Datatype` for XSD integers.
Although the XSD spec defines integers as derived from `xsd:decimal` we implement
it here as a primitive datatype for simplicity and performance reasons.
"""
@type valid_value :: integer
use RDF.XSD.Datatype.Primitive,
name: "integer",
id: RDF.Utils.Bootstrapping.xsd_iri("integer")
alias RDF.XSD
def_applicable_facet XSD.Facets.MinInclusive
def_applicable_facet XSD.Facets.MaxInclusive
def_applicable_facet XSD.Facets.MinExclusive
def_applicable_facet XSD.Facets.MaxExclusive
def_applicable_facet XSD.Facets.TotalDigits
def_applicable_facet XSD.Facets.Pattern
@doc false
def min_inclusive_conform?(min_inclusive, value, _lexical) do
value >= min_inclusive
end
@doc false
def max_inclusive_conform?(max_inclusive, value, _lexical) do
value <= max_inclusive
end
@doc false
def min_exclusive_conform?(min_exclusive, value, _lexical) do
value > min_exclusive
end
@doc false
def max_exclusive_conform?(max_exclusive, value, _lexical) do
value < max_exclusive
end
@doc false
def total_digits_conform?(total_digits, value, _lexical) do
digit_count(value) <= total_digits
end
@doc false
def pattern_conform?(pattern, _value, lexical) do
XSD.Facets.Pattern.conform?(pattern, lexical)
end
@impl XSD.Datatype
def lexical_mapping(lexical, _) do
case Integer.parse(lexical) do
{integer, ""} -> integer
{_, _} -> @invalid_value
:error -> @invalid_value
end
end
@impl XSD.Datatype
@spec elixir_mapping(valid_value | any, Keyword.t()) :: value
def elixir_mapping(value, _)
def elixir_mapping(value, _) when is_integer(value), do: value
def elixir_mapping(_, _), do: @invalid_value
@impl RDF.Literal.Datatype
def do_cast(value)
def do_cast(%XSD.String{} = xsd_string) do
xsd_string.value |> new() |> canonical()
end
def do_cast(literal) do
cond do
XSD.Boolean.datatype?(literal) ->
case literal.value do
false -> new(0)
true -> new(1)
end
XSD.Decimal.datatype?(literal) ->
literal.value
|> Decimal.round(0, :down)
|> Decimal.to_integer()
|> new()
is_float(literal.value) and XSD.Double.datatype?(literal) -> # we're catching the XSD.Floats with this too
literal.value
|> trunc()
|> new()
true ->
super(literal)
end
end
@impl RDF.Literal.Datatype
def do_equal_value_same_or_derived_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_equal_value_different_datatypes?(left, right), do: XSD.Numeric.do_equal_value?(left, right)
@impl RDF.Literal.Datatype
def do_compare(left, right), do: XSD.Numeric.do_compare(left, right)
@doc """
The number of digits in the XML Schema canonical form of the literal value.
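For example, counting the digits of a plain integer:
    iex> RDF.XSD.Integer.digit_count(12345)
    5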
"""
@spec digit_count(RDF.Literal.t() | integer) :: non_neg_integer | nil
def digit_count(%datatype{} = literal) do
if datatype?(literal) and datatype.valid?(literal) do
literal
|> datatype.value()
|> digit_count()
end
end
def digit_count(integer) when is_integer(integer) do
integer |> Integer.digits() |> length()
end
end
# File: lib/rdf/xsd/datatypes/integer.ex
defmodule JsonApiEspec.Core.AssertionStep do
defstruct steps: [], expected_msg: "", actual: nil, expected: nil
def init() do
%__MODULE__{}
end
def step(%__MODULE__{steps: steps} = assertion_step, {step, field} = step_field)
when is_atom(step) and is_binary(field) do
assertion_step
|> Map.put(:steps, [step_field | steps])
end
def remove_steps_to(%__MODULE__{steps: steps} = assertion_step, parent_steps)
when is_list(parent_steps) do
case find_step(steps, parent_steps) do
:no_step_found ->
:no_step_found
steps ->
assertion_step
|> Map.put(:steps, steps)
end
end
def error(%__MODULE__{} = assertion_step, expected_msg, actual, expected)
when is_binary(expected_msg) do
assertion_step
|> Map.put(:expected_msg, expected_msg)
|> Map.put(:actual, actual)
|> Map.put(:expected, expected)
end
def parse_error(%__MODULE__{} = assertion_step, expected_msg, actual, expected)
when is_binary(expected_msg) do
assertion_step
|> Map.put(:steps, [{:response, "response"}])
|> Map.put(:expected_msg, expected_msg)
|> Map.put(:actual, actual)
|> Map.put(:expected, expected)
end
def failure(%__MODULE__{
steps: steps,
expected_msg: expected_msg,
actual: actual,
expected: expected
}) do
steps
|> Enum.map(fn {_step, field} -> field end)
|> Enum.reverse()
|> Enum.join(" > ")
|> (&{
&1,
expected_msg,
actual,
expected
}).()
end
defp find_step(_steps, []) do
:no_step_found
end
defp find_step(steps, [to_find | to_finds]) do
case find_in_steps(steps, to_find) do
:not_found ->
find_step(steps, to_finds)
steps ->
steps
end
end
defp find_in_steps([], _to_find) do
:not_found
end
defp find_in_steps([{step, _value} | next_steps] = steps, to_find) do
if step == to_find do
steps
else
find_in_steps(next_steps, to_find)
end
end
end
# File: lib/core/assertion_step.ex
defmodule ConfigParser.ParseState do
@moduledoc false
@default_options %{
join_continuations: :with_newline,
delimeters: [:equal, :colon]
}
@map Application.get_env(:configparser, :map_implementation)
# What line of the "file" are we parsing
defstruct line_number: 1,
# Section that definitions go into
current_section: nil,
# The amount of whitespace on the last line
last_indent: 0,
# Could the line being parsed be a continuation
continuation?: false,
# If this is a continuation, which key would it continue
last_key: nil,
# The result as it is being built.
result: {:ok, @map.new()},
# options used when parsing the config
options: @default_options
alias __MODULE__
def default_options, do: @default_options
def begin_section(parse_state, new_section) do
# Create a new result, based on the old, with the new section added
{:ok, section_map} = parse_state.result
# Only add a new section if it's not already there
section_key = String.trim(new_section)
new_result =
if @map.has_key?(section_map, section_key) do
# don't change the result if the section already exists
parse_state.result
else
# add the section as an empty map if it doesn't exist
{:ok, @map.put(section_map, section_key, @map.new())}
end
# next line cannot be a continuation
%{
parse_state
| current_section: section_key,
result: new_result,
continuation?: false,
last_key: nil
}
end
def define_config(parse_state, key, value) do
{:ok, section_map} = parse_state.result
if parse_state.current_section != nil do
# pull the values out for the section that's currently being built
value_map = section_map[parse_state.current_section]
# create a new set of values by adding the key/value pair passed in
new_values =
if value == nil do
@map.put(value_map, String.trim(key), nil)
else
@map.put(value_map, String.trim(key), String.trim(value))
end
# create a new result replacing the current section with the new values
new_result = {:ok, @map.put(section_map, parse_state.current_section, new_values)}
# The next line could be a continuation of this value so set continuation to true
# and store the key that we're defining now.
%{parse_state | result: new_result, continuation?: true, last_key: String.trim(key)}
else
new_result =
{:error,
"A configuration section must be defined before defining configuration values in line #{parse_state.line_number}"}
%{parse_state | result: new_result}
end
end
def append_continuation(%ParseState{options: options} = parse_state, continuation_value) do
{:ok, section_map} = parse_state.result
# pull the values out for the section that's currently being built
value_map = section_map[parse_state.current_section]
# create a new set of values by adding the key/value pair passed in
new_value = append_continuation(options, value_map[parse_state.last_key], continuation_value)
define_config(parse_state, parse_state.last_key, new_value)
end
defp append_continuation(%{join_continuations: :with_newline}, value, continuation) do
"#{value}\n#{continuation}"
end
defp append_continuation(%{join_continuations: :with_space}, value, continuation) do
"#{value} #{continuation}"
end
end
# File: lib/ConfigParser/ParseState.ex
defmodule CHAT.TXT do
@moduledoc """
`CHAT.TXT` is a WebSocket client interface and client
socket protocol (session level) representation and
textual protocol termination.
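A sample session (one command per WebSocket text frame; the message id `1`
is illustrative, real ids come from `KVS.seq/2`):
    > AUTH maxim
    < USER maxim
    > SEND alice hello there
    > BOX
    < LIST
      maxim:alice:1:hello there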
"""
use N2O, with: [:n2o, :kvs]
require CHAT
defp format_msg(
CHAT."Pub"(bin: pl, key: id, adr: CHAT."Adr"(src: fr, dst: {:p2p, CHAT."P2P"(dst: to)}))
) do
:io_lib.format('~s:~s:~s:~s', [fr, to, id, pl])
end
@doc """
N2O protocol implementation (client, session part).
"""
def info({:text, <<"AUTH", x::binary>>}, r, s) do
a = :string.trim(:erlang.binary_to_list(x))
key = '/chat/' ++ a
N2O.reg({:client, key})
KVS.ensure(writer(id: key))
{:reply, {:text, <<"USER " <> :erlang.list_to_binary(a)::binary>>}, r, cx(s, session: a)}
end
def info({:text, <<"SEND", _::binary>>}, r, cx(session: []) = s),
do: {:reply, {:text, "Please login with AUTH. Try HELP."}, r, s}
def info({:text, <<"SEND", x::binary>>}, r, cx(session: from) = s) do
case :string.tokens(:string.trim(:erlang.binary_to_list(x)), ' ') do
[to | rest] ->
key = KVS.seq([], [])
msg =
CHAT."Pub"(
key: key,
adr: CHAT."Adr"(src: from, dst: {:p2p, CHAT."P2P"(dst: to)}),
bin: :erlang.iolist_to_binary(:string.join(rest, ' '))
)
res =
case CHAT.user(to) do
false ->
"ERROR user doesn't exist."
true ->
:n2o_ring.send(:ws, :chat, {:publish, self(), from, msg})
<<>>
end
{:reply, {:text, res}, r, s}
_ ->
{:reply, {:text, "ERROR in request."}, r, s}
end
end
def info({:text, <<"BOX">>}, r, cx(session: from) = s) do
KVS.ensure(writer(id: '/chat/' ++ from))
fetch = reader(KVS.take(reader(:kvs.reader('/chat/' ++ from), args: -1)), :args)
res =
"LIST\n" <>
:erlang.list_to_binary(
:string.join(
for m <- :lists.reverse(fetch) do
format_msg(m)
end,
'\n'
)
)
{:reply, {:text, res}, r, s}
end
def info({:text, "HELP"}, r, s),
do: {:reply, {:text, <<"AUTH <user>\n| SEND <user> <msg>\n| BOX\n| CUT <id>.">>}, r, s}
def info({:text, <<"CUT", x::binary>>}, r, cx(session: from) = s) do
case :string.tokens(:string.trim(:erlang.binary_to_list(x)), ' ') do
[id] ->
case KVS.cut('/chat/' ++ from, id) do
{:ok, count} -> {:reply, {:text, <<"ERASED ", CHAT.bin(count)::binary>>}, r, s}
{:error, _} -> {:reply, {:text, <<"NOT FOUND ">>}, r, s}
end
_ ->
{:reply, {:text, <<"ERROR in request.">>}, r, s}
end
end
def info({:forward, CHAT."Pub"() = m}, r, s),
do: {:reply, {:text, "NOTIFY " <> :erlang.list_to_binary(format_msg(m))}, r, s}
def info({:forward, text}, r, s), do: {:reply, {:text, text}, r, s}
def info({:text, _}, r, s), do: {:reply, {:text, "Try HELP"}, r, s}
def info(msg, r, s), do: {:unknown, msg, r, s}
end
# File: lib/txt.ex
defmodule Blanket.Heir do
@moduledoc """
This modules describes the generic server for the table heirs. Use the
`Blanket` module to create and interact with a heir.
"""
alias :ets, as: ETS
use GenServer
defmodule State do
@moduledoc false
defstruct [tab: nil, owner: nil, mref: nil]
end
defp via(tref) do
{:via, Registry, {Blanket.Registry, tref}}
end
def whereis(tref) do
case Registry.lookup(Blanket.Registry, tref) do
[{pid, _}] -> {:ok, pid}
other -> {:error, {:heir_not_found, other}}
end
end
def pid_or_create(tref, opts) do
case __MODULE__.boot(:create, tref, opts) do
{:ok, pid} ->
{:ok, pid}
{:error, {:already_started, pid}} ->
{:ok, pid}
end
end
def boot(mode, tref, opts) do
Supervisor.start_child(Blanket.Heir.Supervisor, [mode, tref, opts])
end
@doc false
# The :via option is just used so we cannot create two processes with the same
# tref. But we use the pid to send messages to the gen_server.
def start_link(mode, tref, opts) do
GenServer.start_link(__MODULE__, [mode, tref, opts], name: via(tref))
end
def claim(pid, owner) when is_pid(pid) do
pid
|> GenServer.call({:claim, owner})
|> case do
err = {:error, _} -> err
{:ok, tab} ->
receive do
{:'ETS-TRANSFER', ^tab, ^pid, :blanket_giveaway} -> :ok
after
1000 -> raise "ETS-TRANSFER message not received after claim"
end
{:ok, tab}
end
end
def attach(pid, tab) do
:ok = set_heir(pid, tab)
GenServer.call(pid, {:attach, tab, self()})
end
# the calling process must own the table.
def detach(pid, tab) do
:ok = remove_heir(tab)
GenServer.call(pid, {:stop, :detach})
end
# the calling process must be the table owner
defp set_heir(pid, tab) do
true = :ets.setopts(tab, [heir_opt(pid)])
:ok
end
defp remove_heir(tab) do
true = :ets.setopts(tab, [{:heir, :none}])
:ok
end
defp heir_opt(pid) when is_pid(pid) do
{:heir, pid, :blanket_heir}
end
# -- Server side -----------------------------------------------------------
def init([:create, tref, opts]) do
case create_table(tref, opts) do
{:ok, tab} ->
{:ok, %State{tab: tab}}
other ->
other
end
end
def init([:recover, _tref, :no_opts]) do
{:ok, %State{}}
end
def handle_call(:stop, _from, state) do
{:stop, :normal, :ok, state}
end
def handle_call({:claim, _new_owner}, _from,
state = %State{owner: owner})
when is_pid(owner) do
{:reply, {:error, :already_owned}, state}
end
def handle_call({:claim, owner}, _from, state = %State{owner: nil, tab: tab})
when is_pid(owner) do
if ETS.info(tab, :owner) === self() do
mref = Process.monitor(owner)
ETS.give_away(tab, owner, :blanket_giveaway)
{:reply, {:ok, tab}, %State{state | mref: mref, owner: owner}}
else
{:reply, {:error, :cannot_giveaway}, state}
end
end
def handle_call({:attach, tab, owner}, _from,
state = %State{owner: nil, tab: nil}) do
mref = Process.monitor(owner)
{:reply, :ok, %State{state | mref: mref, owner: owner, tab: tab}}
end
def handle_call({:stop, :detach}, _from, state) do
{:stop, :normal, :ok, state}
end
def handle_info({:DOWN, mref, :process, owner, _reason},
state = %State{owner: owner, tab: tab, mref: mref}) do
# We receive the 'DOWN' message first, so we wait for the ETS-TRANSFER
receive do
{:'ETS-TRANSFER', ^tab, ^owner, :blanket_heir} ->
{:noreply, reset_owner(state)}
after
5000 -> raise "Transfer not received"
end
end
def handle_info({:'ETS-TRANSFER', tab, owner, :blanket_heir},
state = %State{owner: owner, tab: tab, mref: mref}) do
# We receive the 'ETS-TRANSFER' message first, so we wait for the DOWN
receive do
{:DOWN, ^mref, :process, ^owner, _reason} ->
{:noreply, reset_owner(state)}
after
5000 -> raise "Down message not received"
end
end
def handle_info(_info, state) do
{:noreply, state, :hibernate}
end
defp reset_owner(state),
do: %State{state | owner: nil, mref: nil}
defp create_table(tref, opts) do
# We are creating a new heir, we must also create the table
create_table =
case Keyword.get(opts, :create_table) do
# If the user supplied a module, delegate to its create_table/1 callback
module when is_atom(module) ->
fn() -> apply(module, :create_table, [opts]) end
# If the user supplied a name and options, create a table with those
# options; the heir option is set afterwards.
{tname, table_opts} when is_atom(tname) and is_list(table_opts) ->
fn() -> {:ok, ETS.new(tname, table_opts)} end
fun when is_function(fun, 0) ->
fun
_other ->
raise "Creation method invalid"
end
with {:ok, tab} <- create_table.(),
:ok <- set_heir(self(), tab),
:ok <- Blanket.Metatable.register_table(tab, tref),
do: {:ok, tab}
end
end
# File: lib/blanket/heir.ex
defmodule Parse.VehiclePositions do
@moduledoc """
Parser for the VehiclePositions.pb GTFS-RT file.
"""
@behaviour Parse
alias Model.Vehicle
alias Parse.Realtime.FeedMessage
import Parse.Helpers
def parse("{" <> _ = blob) do
Parse.VehiclePositionsJson.parse(blob)
end
def parse(blob) do
blob
|> FeedMessage.decode()
|> (fn message -> message.entity end).()
|> Stream.map(fn entity -> entity.vehicle end)
|> Stream.map(&parse_vehicle_update/1)
end
def parse_vehicle_update(update) do
%Vehicle{
id: optional_field_copy(update.vehicle, :id),
trip_id: optional_field_copy(update.trip, :trip_id),
route_id: optional_field_copy(update.trip, :route_id),
direction_id: update.trip && update.trip.direction_id,
stop_id: optional_copy(update.stop_id),
label: optional_field_copy(update.vehicle, :label),
latitude: update.position && update.position.latitude,
longitude: update.position && update.position.longitude,
bearing: update.position && update.position.bearing,
speed: update.position && update.position.speed,
current_status: current_status(update.current_status),
current_stop_sequence: update.current_stop_sequence,
updated_at: unix_to_local(update.timestamp),
occupancy_status: occupancy_status(update.occupancy_status)
}
end
defp optional_field_copy(%{} = struct, field) do
optional_copy(Map.get(struct, field))
end
defp optional_field_copy(_, _) do
nil
end
defp current_status(nil) do
:in_transit_to
end
defp current_status(:IN_TRANSIT_TO) do
:in_transit_to
end
defp current_status(:INCOMING_AT) do
:incoming_at
end
defp current_status(:STOPPED_AT) do
:stopped_at
end
defp occupancy_status(nil), do: nil
defp occupancy_status(:EMPTY), do: :empty
defp occupancy_status(:MANY_SEATS_AVAILABLE), do: :many_seats_available
defp occupancy_status(:FEW_SEATS_AVAILABLE), do: :few_seats_available
defp occupancy_status(:STANDING_ROOM_ONLY), do: :standing_room_only
defp occupancy_status(:CRUSHED_STANDING_ROOM_ONLY), do: :crushed_standing_room_only
defp occupancy_status(:FULL), do: :full
defp occupancy_status(:NOT_ACCEPTING_PASSENGERS), do: :not_accepting_passengers
defp unix_to_local(timestamp) when is_integer(timestamp) do
Parse.Timezone.unix_to_local(timestamp)
end
defp unix_to_local(nil) do
DateTime.utc_now()
end
end
# File: apps/parse/lib/parse/vehicle_positions.ex
defmodule PhoenixDatatables do
@moduledoc """
Provides the `execute` function which is the primary entry-point to the library, used
by the `Repo.fetch_datatable` function and directly by client applications.
"""
alias PhoenixDatatables.Request
alias PhoenixDatatables.Query
alias PhoenixDatatables.Response
alias PhoenixDatatables.Response.Payload
alias Plug.Conn
@doc """
Prepare and execute a provided query, modified based on the params map. with the results returned in a `Payload` which
can be encoded to json by Phoenix / Poison and consumed by the DataTables client.
## Options
* `:columns` - If columns are not provided, the list of
valid columns to use for filtering and ordering is determined by introspection of the
Ecto query, and the attributes and associations defined in the Schemas used in that
query. This will not always work - Ecto queries may contain subqueries or schema-less queries.
Such queryables will need to be accompanied by `:columns` options.
Even if the queryable uses only schemas and joins built with `assoc` there are security reasons to
provide a `:columns` option.
The client provides the columns
to use for filtering and searching in its request, but client input cannot be trusted: a denial-of-service
attack could be constructed by requesting a search against un-indexed fields on a large table, for example.
To harden your server, you can limit the sorting and filtering possibilities on the server side
by specifying the columns that should be available.
A list of valid columns that are eligible to be used for sorting and filtering can be passed as
a nested keyword list, where the keyword is the table (or association) name and the value is
the column name together with its query binding order.
In the example below, the query is a simple join using `assoc` and could be introspected, so `:columns`
is optional.
In the example, `columns` is bound to such a list: the 0 means the `nsn` column belongs to the `from` table,
and `category.name` refers to the first join table in the query (binding 1). In the client datatables options,
the column `:data` attribute should be set to `nsn` for the first column and `category.name` for the second.
* `:total_entries` - Provides a way for the application to use cached values for total_entries; when this
is provided, `phoenix_datatables` won't do a query to get the total record count, instead using
the provided value in the response. The mechanism for caching is left up to the application.
```
query =
(from item in Item,
join: category in assoc(item, :category),
select: %{id: item.id, nsn: item.nsn, category_name: category.name})
options = [columns: [nsn: 0, category: [name: 1]], total_entries: 25]
Repo.fetch_datatable(query, params, options)
```
"""
@spec execute(Ecto.Queryable.t,
Conn.params,
Ecto.Repo.t,
Keyword.t | nil) :: Payload.t
def execute(query, params, repo, options \\ []) do
params = Request.receive(params)
total_entries = options[:total_entries] || Query.total_entries(query, repo)
filtered_query =
query
|> Query.sort(params, options[:columns])
|> Query.search(params, options)
|> Query.paginate(params)
filtered_entries =
if params.search.value == "" do
total_entries
else
Query.total_entries(filtered_query, repo)
end
filtered_query
|> repo.all()
|> Response.new(params.draw, total_entries, filtered_entries)
end
@doc """
Use the provided function to transform the records embeded in the
Payload, often used in a json view for example
to convert an Ecto schema to a plain map so it can be serialized by Poison.
query
|> Repo.fetch_datatable(params)
|> PhoenixDatatables.map_payload(fn item -> %{
nsn: item.nsn,
category_name: item.category.name}
end)
"""
@spec map_payload(Payload.t, (any -> any)) :: Payload.t
def map_payload(%Payload{} = payload, fun) when is_function(fun) do
%Payload {payload |
data: Enum.map(payload.data, fun)
}
end
end
# File: lib/phoenix_datatables.ex
defmodule Mojito.Telemetry do
@moduledoc ~S"""
Mojito's [Telemetry](https://github.com/beam-telemetry/telemetry)
integration.
All time measurements are emitted in `:millisecond` units by
default. A different
[Erlang time unit](https://erlang.org/doc/man/erlang.html#type-time_unit)
can be chosen by setting a config parameter like so:
```
config :mojito, Mojito.Telemetry, time_unit: :microsecond
```
Mojito emits the following Telemetry events:
* `[:mojito, :pool, :start]` before launching a pool
- Measurements: `:system_time`
- Metadata: `:host`, `:port`
* `[:mojito, :pool, :stop]` after launching a pool
- Measurements: `:system_time`, `:duration`
- Metadata: `:host`, `:port`
* `[:mojito, :connect, :start]` before connecting to a host
- Measurements: `:system_time`
- Metadata: `:host`, `:port`
* `[:mojito, :connect, :stop]` after connecting to a host
- Measurements: `:system_time`, `:duration`
- Metadata: `:host`, `:port`
* `[:mojito, :request, :start]` before making a request
- Measurements: `:system_time`
- Metadata: `:host`, `:port`, `:path`, `:method`
* `[:mojito, :request, :stop]` after making a request
- Measurements: `:system_time`, `:duration`
- Metadata: `:host`, `:port`, `:path`, `:method`
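For example, a minimal sketch that logs request durations (the handler id
`"log-mojito-requests"` is arbitrary):
```
:telemetry.attach(
  "log-mojito-requests",
  [:mojito, :request, :stop],
  fn _event, %{duration: duration}, %{host: host}, _config ->
    # duration is in the configured time unit (default :millisecond)
    IO.puts("request to #{host} took #{duration}ms")
  end,
  nil
)
```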
"""
@typep monotonic_time :: integer
defp time_unit do
Application.get_env(:mojito, Mojito.Telemetry)[:time_unit] || :millisecond
end
defp monotonic_time do
:erlang.monotonic_time(time_unit())
end
defp system_time do
:erlang.system_time(time_unit())
end
@doc false
@spec start(atom, map) :: monotonic_time
def start(name, meta \\ %{}) do
start_time = monotonic_time()
:telemetry.execute(
[:mojito, name, :start],
%{system_time: system_time()},
meta
)
start_time
end
@doc false
@spec stop(atom, monotonic_time, map) :: monotonic_time
def stop(name, start_time, meta \\ %{}) do
stop_time = monotonic_time()
duration = stop_time - start_time
:telemetry.execute(
[:mojito, name, :stop],
%{system_time: system_time(), duration: duration},
meta
)
stop_time
end
end
# File: lib/mojito/telemetry.ex
defmodule Spat.Geometry.Sphere do
use Bitwise
@doc """
Obtain the indexes of a sphere within the subdivided bounds.
iex> Spat.Geometry.Sphere.index({ 0 }, 1, Spat.Bounds.new({ 10 }), 1)
[[0]]
iex> Spat.Geometry.Sphere.index({ 5 }, 1, Spat.Bounds.new({ 10 }), 1)
[[0], [1]]
iex> Spat.Geometry.Sphere.index({ 10 }, 1, Spat.Bounds.new({ 10 }), 1)
[[1]]
iex> Spat.Geometry.Sphere.index({ -1 }, 1, Spat.Bounds.new({ 10 }), 1)
[]
iex> Spat.Geometry.Sphere.index({ 2.5 }, 1, Spat.Bounds.new({ 10 }), 1)
[[0]]
iex> Spat.Geometry.Sphere.index({ 2.5 }, 1, Spat.Bounds.new({ 10 }), 2)
[[0, 0], [0, 1]]
iex> Spat.Geometry.Sphere.index({ 5, 5 }, 1, Spat.Bounds.new({ 10, 10 }), 2)
[[0, 3], [1, 2], [2, 1], [3, 0]]
iex> Spat.Geometry.Sphere.index({ 2.5, 5 }, 1, Spat.Bounds.new({ 10, 10 }), 2)
[[0, 2], [0, 3], [2, 0], [2, 1]]
iex> Spat.Geometry.Sphere.index({ 0, 0 }, 1, Spat.Bounds.new({ 10, 10 }), 2)
[[0, 0]]
iex> Spat.Geometry.Sphere.index({ 0, 0 }, 20, Spat.Bounds.new({ 10, 10 }), 2)
[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2], [1, 3], [2, 0], [2, 1], [2, 2], [2, 3], [3, 0], [3, 1], [3, 2], [3, 3]]
iex> Spat.Geometry.Sphere.index({ 12.5, 5 }, 1, Spat.Bounds.new({ 10, 0 }, { 20, 10 }), 2)
[[0, 2], [0, 3], [2, 0], [2, 1]]
iex> Spat.Geometry.Sphere.index({ 10, 0 }, 1, Spat.Bounds.new({ 10, 0 }, { 20, 10 }), 2)
[[0, 0]]
iex> Spat.Geometry.Sphere.index({ 0 }, 1, Spat.Bounds.new({ -10 }, { 10 }), 1)
[[0], [1]]
iex> Spat.Geometry.Sphere.index({ -5 }, 1, Spat.Bounds.new({ -10 }, { 10 }), 2)
[[0, 0], [0, 1]]
"""
@spec index(Spat.Coord.t, number, Spat.Bounds.t, pos_integer) :: [Spat.grid_index]
def index(origin, radius, bounds, subdivisions), do: Spat.Geometry.index(&intersect(origin, radius, &1), bounds, subdivisions)
@doc """
Check whether a sphere intersects with the given bounds (equal to or contained
inside).
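Mirroring the `index/4` examples above:
    iex> Spat.Geometry.Sphere.intersect({ 0 }, 1, Spat.Bounds.new({ 10 }))
    true
    iex> Spat.Geometry.Sphere.intersect({ -1 }, 1, Spat.Bounds.new({ 10 }))
    false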
"""
@spec intersect(Spat.Coord.t, number, Spat.Bounds.t) :: boolean
def intersect(origin, radius, %{ min: min, max: max, dimension: dimension }), do: intersect(origin, radius, min, max, dimension)
@spec intersect(Spat.Coord.t, number, Spat.Coord.t, Spat.Coord.t, non_neg_integer, number) :: boolean
defp intersect(origin, radius, min, max, dimension, dist2 \\ 0)
defp intersect(_, radius, _, _, 0, dist2), do: dist2 < (radius * radius)
defp intersect(origin, radius, min, max, dimension, dist2) do
axis = dimension - 1
p = Spat.Coord.get(origin, axis)
start = Spat.Coord.get(min, axis)
stop = Spat.Coord.get(max, axis)
closest = min(max(p, start), stop)
distance = p - closest
dist2 = dist2 + (distance * distance)
intersect(origin, radius, min, max, axis, dist2)
end
end
# File: lib/spat/geometry/sphere.ex
defmodule Scrip.PendingRenewalInfo do
@moduledoc """
An array of elements that refer to auto-renewable subscription renewals that are open or failed in the past.
Only returned for app receipts that contain auto-renewable subscriptions.
See
https://developer.apple.com/documentation/appstorereceipts/responsebody/pending_renewal_info
"""
@typedoc """
The renewal status for the auto-renewable subscription.
#### Possible Values
`true`
The subscription will renew at the end of the current subscription period.
`false`
The customer has turned off automatic renewal for the subscription.
See: https://developer.apple.com/documentation/appstorereceipts/auto_renew_status
"""
@type auto_renew_status :: boolean()
@typedoc """
An indicator of whether an auto-renewable subscription is in the billing retry period.
#### Possible Values
`true`
The App Store is attempting to renew the subscription.
`false`
The App Store has stopped attempting to renew the subscription.
See: https://developer.apple.com/documentation/appstorereceipts/is_in_billing_retry_period
"""
@type is_in_billing_retry_period :: boolean()
@typedoc """
Refers to auto-renewable subscription renewals that are open or failed in the past.
See: https://developer.apple.com/documentation/appstorereceipts/responsebody/pending_renewal_info#properties
"""
@type t :: %__MODULE__{
auto_renew_product_id: String.t(),
auto_renew_status: auto_renew_status,
expiration_intent: 1..5,
is_in_billing_retry_period: is_in_billing_retry_period,
original_transaction_id: String.t(),
product_id: String.t()
}
@doc """
The `#{__MODULE__}` struct
Contains the auto-renewable subscription renewals that are open or failed in the past.
"""
defstruct [
:auto_renew_product_id,
:auto_renew_status,
:expiration_intent,
:is_in_billing_retry_period,
:original_transaction_id,
:product_id
]
@spec new(response :: map) :: Scrip.PendingRenewalInfo.t()
@doc """
Converts response map to `%#{__MODULE__}` struct
"""
def new(response) do
%__MODULE__{
auto_renew_product_id: response["auto_renew_product_id"],
auto_renew_status: Scrip.Util.to_boolean(response["auto_renew_status"]),
# expiration_intent is only present for expired subscriptions
expiration_intent: response["expiration_intent"] && String.to_integer(response["expiration_intent"]),
is_in_billing_retry_period: Scrip.Util.to_boolean(response["is_in_billing_retry_period"]),
original_transaction_id: response["original_transaction_id"],
product_id: response["product_id"]
}
end
end
# File: lib/scrip/pending_renewal_info.ex
defmodule ElxValidation.Internet do
@moduledoc """
### email
- The field under validation must be formatted as an email address.
### url
- The field under validation must be a valid URL.
### ip
- The field under validation must be an IP address.
### ipv4
- The field under validation must be an IPv4 address.
### ipv6
- The field under validation must be an IPv6 address.
***
```
data = %{
email: "<EMAIL>",
url: "http://google.com",
ip: "192.168.1.1",
ipv4: "192.168.1.1",
ipv6: "2001:0db8:85a3:0000:0000:8a2e:0370:7334"
}
rules = [
%{
field: "email",
validate: ["email"]
},
%{
field: "url",
validate: ["url"]
},
%{
field: "ip",
validate: ["ip"]
},
%{
field: "ipv4",
validate: ["ipv4"]
},
%{
field: "ipv6",
validate: ["ipv6"]
}
]
```
"""
@doc """
check Email Address
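## Example
    iex> ElxValidation.Internet.email("user@example.com")
    true
    iex> ElxValidation.Internet.email("not-an-email")
    false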
"""
def email(target) do
Regex.match?(~r/^[\w-\.]+@([\w-]+\.)+[\w-]{2,4}$/, target)
rescue
_ ->
false
end
@doc """
check Url Address
"""
def url(target) do
Regex.match?(~r/(https?:\/\/)?([\w\-])+\.{1}([a-zA-Z]{2,63})([\/\w-]*)*\/?\??([^#\n\r]*)?#?([^\n\r]*)/, target)
rescue
_ ->
false
end
@doc """
check IP Address IPV4 /IPV6
"""
def ip(target) do
ipv4(target) || ipv6(target)
rescue
_ ->
false
end
@doc """
check IP Address IPV4
"""
def ipv4(target) do
Regex.match?(
~r/^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/,
target
)
rescue
_ ->
false
end
@doc """
check IP Address IPV6
"""
def ipv6(target) do
Regex.match?(
~r/^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$/,
target
)
rescue
_ ->
false
end
end
# File: lib/rules/internet.ex
defmodule StarkInfra.PixDirector do
alias __MODULE__, as: PixDirector
alias StarkInfra.Utils.Rest
alias StarkInfra.User.Project
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixDirector related functions
"""
@doc """
The PixDirector struct contains the contact information of the institution's Pix director,
which must be registered with the Central Bank.
## Parameters (required):
- `:name` [string]: name of the PixDirector. ex: "<NAME>".
- `:tax_id` [string]: tax ID (CPF/CNPJ) of the PixDirector. ex: "03.300.300/0001-00"
- `:phone` [string]: phone of the PixDirector. ex: "+551198989898"
- `:email` [string]: email of the PixDirector. ex: "<EMAIL>"
- `:password` [string]: password of the PixDirector. ex: "<PASSWORD>"
- `:team_email` [string]: team email. ex: "<EMAIL>"
- `:team_phones` [list of strings]: list of phones of the team. ex: ["+5511988889999", "+5511988889998"]
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixDirector is created. ex: "5656565656565656"
- `:status` [string]: current PixDirector status. ex: "success"
"""
@enforce_keys [
:name,
:tax_id,
:phone,
:email,
:password,
:team_email,
:team_phones
]
defstruct [
:name,
:tax_id,
:phone,
:email,
:password,
:team_email,
:team_phones,
:id,
:status
]
@type t() :: %__MODULE__{}
@doc """
Send a PixDirector struct for creation in the Stark Infra API
## Parameters (required):
- `:director` [PixDirector struct]: PixDirector struct to be created in the API
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixDirector struct with updated attributes
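## Example
A minimal sketch (all values are illustrative):
    director = %StarkInfra.PixDirector{
      name: "Jane Doe",
      tax_id: "012.345.678-90",
      phone: "+5511999999999",
      email: "jane.doe@bank.com",
      password: "Str0ngP4ssword",
      team_email: "pix.team@bank.com",
      team_phones: ["+5511988889999", "+5511988889998"]
    }
    {:ok, director} = StarkInfra.PixDirector.create(director)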
"""
@spec create(
PixDirector.t() | map(),
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixDirector.t()} |
{:error, [error: Error.t()]}
def create(director, options \\ []) do
Rest.post_single(
resource(),
director,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
PixDirector.t() | map(),
user: Project.t() | Organization.t() | nil
) :: any
def create!(director, options \\ []) do
Rest.post_single!(
resource(),
director,
options
)
end
@doc false
def resource() do
{
"PixDirector",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%PixDirector{
id: json[:id],
name: json[:name],
tax_id: json[:tax_id],
phone: json[:phone],
email: json[:email],
password: json[:password],
team_email: json[:team_email],
team_phones: json[:team_phones],
status: json[:status]
}
end
end
# File: lib/pix_director/pix_director.ex
defmodule Sqlitex.Row do
@moduledoc false
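# An illustrative call: `types` come from the SQLite column declarations and
# `into` selects the collectable used for each row:
#
#     from([:INTEGER, :TEXT], ["id", "name"], [{1, "Ana"}], %{})
#     #=> [%{"id" => 1, "name" => "Ana"}]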
def from(types, columns, rows, into) do
for row <- rows do
build_row(types, columns, row, into)
end
end
defp build_row(_types, _columns, row, :raw_list) do
Tuple.to_list(row)
end
defp build_row(types, columns, row, into) do
types = Enum.map(types, fn type ->
type |> Atom.to_string |> String.downcase
end)
values = row |> Tuple.to_list |> Enum.zip(types) |> Enum.map(&translate_value/1)
columns
|> Enum.zip(values)
|> Enum.into(into)
end
## Convert SQLite values/types to Elixir types
defp translate_value({:undefined, _type}) do
nil
end
# date is empty ""
defp translate_value({"", "date"}), do: nil
defp translate_value({date, "date"}) when is_binary(date), do: to_date(date)
# time is empty ""
defp translate_value({"", "time"}), do: nil
defp translate_value({time, "time"}) when is_binary(time), do: to_time(time)
# datetime is empty ""
defp translate_value({"", "datetime"}), do: nil
# datetime format is "YYYY-MM-DD HH:MM:SS.FFFFFF"
defp translate_value({datetime, "datetime"}) when is_binary(datetime) do
[date, time] = String.split(datetime)
{to_date(date), to_time(time)}
end
defp translate_value({0, "boolean"}), do: false
defp translate_value({1, "boolean"}), do: true
defp translate_value({int, type = <<"decimal", _::binary>>}) when is_integer(int) do
{result, _} = int |> Integer.to_string |> Float.parse
translate_value({result, type})
end
defp translate_value({float, "decimal"}), do: Decimal.from_float(float)
defp translate_value({float, "decimal(" <> rest}) do
[precision, scale] = rest |> string_rstrip(?)) |> String.split(",") |> Enum.map(&(&1 |> String.trim() |> String.to_integer))
Decimal.with_context %Decimal.Context{precision: precision, rounding: :down}, fn ->
float |> Float.round(scale) |> Decimal.from_float |> Decimal.plus
end
end
defp translate_value({val, _type}) do
val
end
defp to_date(date) do
<<yr::binary-size(4), "-", mo::binary-size(2), "-", da::binary-size(2)>> = date
{String.to_integer(yr), String.to_integer(mo), String.to_integer(da)}
end
defp to_time(<<hr::binary-size(2), ":", mi::binary-size(2)>>) do
{String.to_integer(hr), String.to_integer(mi), 0, 0}
end
defp to_time(<<hr::binary-size(2), ":", mi::binary-size(2), ":", se::binary-size(2)>>) do
{String.to_integer(hr), String.to_integer(mi), String.to_integer(se), 0}
end
defp to_time(<<hr::binary-size(2), ":", mi::binary-size(2), ":", se::binary-size(2), ".", fr::binary>>) when byte_size(fr) <= 6 do
fr = String.to_integer(fr <> String.duplicate("0", 6 - String.length(fr)))
{String.to_integer(hr), String.to_integer(mi), String.to_integer(se), fr}
end
if Version.compare(System.version, "1.5.0") == :lt do
defp string_rstrip(string, char), do: String.rstrip(string, char)
else
defp string_rstrip(string, char), do: String.trim_trailing(string, to_string([char]))
end
end
# File: deps/sqlitex/lib/sqlitex/row.ex
defmodule ExZipper.Zipper do
@moduledoc """
An Elixir implementation of [Huet's Zipper][huet], with gratitude to <NAME>'s
[Clojure implementation][clojure].
Zippers provide a method of navigating and editing a tree while maintaining
enough state data to reconstruct the tree from the currently focused node.
For the most part, functions defined on `ExZipper.Zipper` return either an
`ExZipper.Zipper` struct or an error tuple of the form `{:error, :error_type}`,
if the function tries to move to a point on the tree that doesn't exist.
This allows easy chaining of functions with a quick failure mode if any function
in the chain returns an error.
[huet]: https://www.st.cs.uni-saarland.de/edu/seminare/2005/advanced-fp/docs/huet-zipper.pdf
[clojure]: https://clojure.github.io/clojure/clojure.zip-api.html
"""
alias __MODULE__, as: Zipper
defstruct [:focus, :crumbs, :functions]
@type t :: %__MODULE__{focus: any(), crumbs: nil | map(), functions: map()}
@type error :: {:error, atom}
@type maybe_zipper :: Zipper.t | Zipper.error
@doc """
Returns a new zipper with `root` as the root tree of the zipper, and
`is_branch`, `children` and `make_node` as the internal functions that
define construction parameters for the tree.
In order, the arguments are
1. a function to determine whether a node is a branch
2. a function to return the children of a branch node
3. a function to create a new node from an existing node and a new set of children
4. the root node of the zipper
## Example
iex> zipper = Zipper.zipper( # zipper for nested lists
...> &is_list/1, # a branch can have children, so, a list
...> &(&1), # the children of a list is the list itself
...> fn _node, children -> children end, # a new node is just the new list
...> [1,[2,3,[4,5]]]
...> )
iex> zipper.focus
[1,[2,3,[4,5]]]
"""
@spec zipper(
(any() -> boolean),
(any() -> [any()]),
(any(), [any()] -> any()),
any()
) :: Zipper.t
def zipper(is_branch, children, make_node, root) do
%__MODULE__{
focus: root,
crumbs: nil,
functions: %{
branch?: is_branch,
children: children,
make_node: make_node
}
}
end
@doc """
Returns a new zipper built from the given list
## Example
iex> zipper = Zipper.list_zipper([1,[2,3,[4,5]]])
iex> zipper.focus
[1,[2,3,[4,5]]]
"""
@spec list_zipper(list()) :: Zipper.t
def list_zipper(list) when is_list(list) do
zipper(
&is_list/1,
&(&1),
fn _node, children -> children end,
list
)
end
@doc """
Returns the current focus of the zipper
## Example
iex> zipper = Zipper.list_zipper([1,[2,3,[4,5]]])
iex> Zipper.node(zipper)
[1,[2,3,[4,5]]]
iex> zipper |> Zipper.down |> Zipper.node
1
"""
@spec node(Zipper.t) :: any
def node(%__MODULE__{focus: focus}), do: focus
@doc """
Returns to the root of the zipper. Remains in place if already on the root.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> zipper |> Zipper.root |> Zipper.node
[1,[],[2,3,[4,5]]]
iex> zipper |> Zipper.down |> Zipper.rightmost |> Zipper.down |> Zipper.root |> Zipper.node
[1,[],[2,3,[4,5]]]
"""
@spec root(Zipper.t) :: Zipper.t
def root(zipper = %__MODULE__{crumbs: nil}), do: zipper
def root(zipper = %__MODULE__{}), do: zipper |> up |> root
@doc """
Returns all left siblings of the current focus. Returns an error if called
on the root.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.lefts(zipper)
{:error, :lefts_of_root}
iex> zipper |> Zipper.down |> Zipper.lefts
[]
iex> zipper |> Zipper.down |> Zipper.rightmost |> Zipper.lefts
[1,[]]
"""
@spec lefts(Zipper.t) :: [any()] | Zipper.error
def lefts(%__MODULE__{crumbs: nil}), do: {:error, :lefts_of_root}
def lefts(%__MODULE__{crumbs: %{left: left}}), do: Enum.reverse(left)
@doc """
Returns all right siblings of the current focus. Returns an error if called
on the root.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.rights(zipper)
{:error, :rights_of_root}
iex> zipper |> Zipper.down |> Zipper.rights
[[],[2,3,[4,5]]]
iex> zipper |> Zipper.down |> Zipper.rightmost |> Zipper.rights
[]
"""
@spec rights(Zipper.t) :: [any()] | Zipper.error
def rights(%__MODULE__{crumbs: nil}), do: {:error, :rights_of_root}
def rights(%__MODULE__{crumbs: %{right: right}}), do: right
@doc """
Returns a path of nodes leading from the root to, but excluding, the current
focus. Returns an empty list at the root.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.path(zipper)
[]
iex> zipper = zipper |> Zipper.down |> Zipper.rightmost |> Zipper.down
iex> zipper.focus
2
iex> Zipper.path(zipper)
[[1,[],[2,3,[4,5]]],[2,3,[4,5]]]
"""
@spec path(Zipper.t) :: [any()]
def path(%__MODULE__{crumbs: nil}), do: []
def path(%__MODULE__{crumbs: %{ppath: paths}}), do: Enum.reverse(paths)
@doc """
Returns true if the current focus of the zipper is a branch, even if it has
no children, false otherwise.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.branch?(zipper)
true
iex> zipper |> Zipper.down |> Zipper.branch?
false
iex> zipper |> Zipper.down |> Zipper.right |> Zipper.branch?
true
"""
@spec branch?(Zipper.t) :: boolean
def branch?(zipper = %__MODULE__{}) do
zipper.functions.branch?.(zipper.focus)
end
@doc """
Returns the children of the current focus, or an error if called on a leaf.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.children(zipper)
[1,[],[2,3,[4,5]]]
iex> zipper |> Zipper.down |> Zipper.children
{:error, :children_of_leaf}
iex> zipper |> Zipper.down |> Zipper.right |> Zipper.children
[]
"""
@spec children(Zipper.t) :: [any()] | Zipper.error
def children(zipper = %__MODULE__{}) do
case branch?(zipper) do
true -> zipper.functions.children.(zipper.focus)
false -> {:error, :children_of_leaf}
end
end
@doc """
Returns a new node created from `node` and `children`. The `zipper` first argument
provides the context that determines how the new node is constructed.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.make_node(zipper, [8,9], [10,11,12])
[10,11,12]
"""
@spec make_node(Zipper.t, any(), [any()]) :: any()
def make_node(zipper = %__MODULE__{}, node, children) do
zipper.functions.make_node.(node, children)
end
@doc """
Returns true if a depth-first walkthrough of the zipper has been exhausted.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> zipper = zipper |> Zipper.next |> Zipper.next |> Zipper.next
iex> Zipper.node(zipper)
[2,3,[4,5]]
iex> Zipper.end?(zipper)
false
iex> zipper = zipper |> Zipper.next |> Zipper.next |> Zipper.next |> Zipper.next |> Zipper.next |> Zipper.next
iex> Zipper.node(zipper)
[1,[],[2,3,[4,5]]]
iex> Zipper.end?(zipper)
true
"""
@spec end?(Zipper.t) :: boolean
def end?(%__MODULE__{crumbs: :end}), do: true
def end?(%__MODULE__{}), do: false
@doc """
Returns a flat list of all the elements in the zipper, ordered via a
depth-first walk, including the root.
## Examples
iex> zipper = Zipper.list_zipper([1,[],[2,3,[4,5]]])
iex> Zipper.to_list(zipper)
[[1,[],[2,3,[4,5]]], 1, [], [2,3,[4,5]], 2, 3, [4,5], 4, 5]
"""
@spec to_list(Zipper.t) :: [any()]
def to_list(zipper = %__MODULE__{}), do: _to_list(zipper, [])
defdelegate down(zipper), to: ExZipper.Zipper.Navigation
defdelegate up(zipper), to: ExZipper.Zipper.Navigation
defdelegate right(zipper), to: ExZipper.Zipper.Navigation
defdelegate left(zipper), to: ExZipper.Zipper.Navigation
defdelegate rightmost(zipper), to: ExZipper.Zipper.Navigation
defdelegate leftmost(zipper), to: ExZipper.Zipper.Navigation
defdelegate next(zipper), to: ExZipper.Zipper.Navigation
defdelegate prev(zipper), to: ExZipper.Zipper.Navigation
defdelegate insert_right(zipper, node), to: ExZipper.Zipper.Editing
defdelegate insert_left(zipper, node), to: ExZipper.Zipper.Editing
defdelegate insert_child(zipper, node), to: ExZipper.Zipper.Editing
defdelegate append_child(zipper, node), to: ExZipper.Zipper.Editing
defdelegate replace(zipper, node), to: ExZipper.Zipper.Editing
defdelegate edit(zipper, node), to: ExZipper.Zipper.Editing
defdelegate remove(zipper), to: ExZipper.Zipper.Editing
## Private
defp _to_list(zipper, acc) do
case end?(zipper) do
true -> Enum.reverse(acc)
false -> _to_list(next(zipper), [__MODULE__.node(zipper)|acc])
end
end
end
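# Usage sketch (illustrative; assumes the full ex_zipper library, including the
# Navigation and Editing modules delegated to above, is compiled and available):
alias ExZipper.Zipper

[1, [2, 3, [4, 5]]]
|> Zipper.list_zipper()
|> Zipper.down()        # focus moves to 1
|> Zipper.replace(:one) # swap out the focused node
|> Zipper.root()        # zip back up, rebuilding the tree from the crumbs
|> Zipper.node()        # => [:one, [2, 3, [4, 5]]]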
# source: lib/ex_zipper/zipper.ex
defmodule Day21 do
def part_one(input) do
21
|> input.contents_of(:stream)
|> Stream.map(&String.trim/1)
|> Stream.map(fn line -> String.split(line, ":") end)
|> Stream.map(fn [_prefix, pos] -> pos end)
|> Stream.map(&String.trim/1)
|> Enum.map(&String.to_integer/1)
|> play(deterministic())
end
def part_two(input) do
21
|> input.contents_of(:stream)
|> Stream.map(&String.trim/1)
|> Stream.map(fn line -> String.split(line, ":") end)
|> Stream.map(fn [_prefix, pos] -> pos end)
|> Stream.map(&String.trim/1)
|> Enum.map(&String.to_integer/1)
|> then(fn [pos1, pos2] -> %{{{pos1, 0}, {pos2, 0}} => 1} end)
|> play_dirac([0, 0])
end
defp play_dirac(universes, wins) when universes == %{}, do: Enum.max(wins)
defp play_dirac(universes, [cur_wins, other_wins]) do
{next_universes, wins} = dirac_step(universes)
play_dirac(next_universes, [other_wins, cur_wins + wins])
end
defp dirac_step(universes) do
{wins, next_universes} =
universes
|> Enum.flat_map(fn {{{old_pos, score}, player2}, count} ->
[{3, 1}, {4, 3}, {5, 6}, {6, 7}, {7, 6}, {8, 3}, {9, 1}]
|> Enum.map(fn {step, freq} ->
pos = advance(old_pos, step)
{{player2, {pos, score + pos}}, count * freq}
end)
end)
|> Enum.split_with(fn {{_player1, {_pos, score}}, _count} -> score >= 21 end)
next_universes =
next_universes
|> Enum.reduce(%{}, fn {state, count}, map ->
Map.update(map, state, count, &(&1 + count))
end)
{next_universes, wins |> Enum.map(fn {_players, count} -> count end) |> Enum.sum()}
end
defp play([p1, p2], die) do
die
|> Stream.chunk_every(3)
|> Stream.map(&sum/1)
|> Stream.with_index()
|> Enum.reduce_while([{p1, 0}, {p2, 0}], fn
{_step, rolls}, [{_pos1, losing}, {_pos2, score}] when score >= 1000 ->
{:halt, 3 * rolls * losing}
{step, _rolls}, [{old_pos, score}, player2] ->
pos = advance(old_pos, step)
{:cont, [player2, {pos, score + pos}]}
end)
end
def advance(pos, step) do
case rem(pos + step, 10) do
0 -> 10
i -> i
end
end
defp deterministic() do
Stream.cycle(1..100)
end
defp sum(l), do: Enum.reduce(l, &Kernel.+/2)
end
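# Sanity check for the hard-coded frequency table in dirac_step/1: it is the
# distribution of sums of three rolls of the 3-sided Dirac die. Runnable with
# the standard library alone:
for(a <- 1..3, b <- 1..3, c <- 1..3, do: a + b + c)
|> Enum.frequencies()
|> Enum.sort()
# => [{3, 1}, {4, 3}, {5, 6}, {6, 7}, {7, 6}, {8, 3}, {9, 1}]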
# source: year_2021/lib/day_21.ex
defmodule Yggdrasil.Publisher.Adapter do
@moduledoc """
Publisher adapter behaviour.
"""
alias Yggdrasil.Channel
alias Yggdrasil.Registry
@doc """
Callback to start a publisher with a `namespace` and some `GenServer`
`options`.
"""
@callback start_link(
namespace :: atom(),
options :: GenServer.options()
) :: GenServer.on_start()
@doc """
Callback for publishing a `message` in a `channel` using a `publisher`.
"""
@callback publish(
publisher :: GenServer.server(),
channel :: Channel.t(),
message :: term()
) :: :ok | {:error, term()}
@doc """
Publishes a `message` in a `channel` using a `publisher` and some `options`.
"""
@callback publish(
publisher :: GenServer.server(),
channel :: Channel.t(),
message :: term(),
options :: keyword()
) :: :ok | {:error, term()}
@doc """
Use to implement `Yggdrasil.Publisher.Adapter` behaviour.
"""
defmacro __using__(_) do
quote do
@behaviour Yggdrasil.Publisher.Adapter
@doc """
Starts a publisher adapter for an adapter given a `namespace`.
Optionally, receives `GenServer` `options`.
"""
@impl Yggdrasil.Publisher.Adapter
def start_link(namespace, options \\ [])
def start_link(namespace, options) do
GenServer.start_link(__MODULE__, namespace, options)
end
@doc """
Publishes a `message` in a `channel` using a `publisher`.
"""
@impl Yggdrasil.Publisher.Adapter
def publish(publisher, channel, message, options \\ [])
def publish(publisher, %Channel{} = channel, message, _options) do
GenServer.call(publisher, {:publish, channel, message})
end
defoverridable start_link: 1,
start_link: 2,
publish: 3,
publish: 4
end
end
@doc """
Generic publisher adapter starter that receives a `channel` and optional
`GenServer` options.
"""
@spec start_link(Channel.t()) :: GenServer.on_start()
@spec start_link(Channel.t(), GenServer.options()) :: GenServer.on_start()
def start_link(channel, options \\ [])
def start_link(
%Channel{
adapter: adapter,
namespace: namespace
},
options
) do
with {:ok, module} <- Registry.get_publisher_module(adapter) do
module.start_link(namespace, options)
end
end
@doc """
Generic publisher adapter publish function. Publishes a `message` in a
`channel` using a `publisher` and some `options`.
"""
@spec publish(GenServer.server(), Channel.t(), term(), keyword()) ::
:ok | {:error, term()}
def publish(publisher, channel, message, options)
def publish(
publisher,
%Channel{adapter: adapter} = channel,
message,
options
) do
with {:ok, module} <- Registry.get_publisher_module(adapter) do
module.publish(publisher, channel, message, options)
end
end
@doc """
Generic publisher adapter stopper that receives the `pid`.
"""
@spec stop(GenServer.server()) :: :ok
def stop(pid) do
GenServer.stop(pid)
end
end
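# Hypothetical adapter sketch (module name and transport are made up): `use
# Yggdrasil.Publisher.Adapter` injects overridable start_link/2 and publish/4
# that route through GenServer, so a concrete adapter mostly supplies the
# server callbacks.
defmodule MyApp.Publisher.Adapter do
  use Yggdrasil.Publisher.Adapter
  use GenServer

  @impl GenServer
  def init(namespace), do: {:ok, namespace}

  @impl GenServer
  def handle_call({:publish, _channel, message}, _from, namespace) do
    # A real adapter would hand `message` to its transport here.
    IO.inspect(message, label: "publishing")
    {:reply, :ok, namespace}
  end
end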
# source: lib/yggdrasil/publisher/adapter.ex
defmodule Guss.StorageV2Signer do
@moduledoc """
Sign a `Guss.Resource` using the Cloud Storage V2 Signing Process for Service Accounts.
This module generates the _string to sign_ for a `Guss.Resource`,
and signs it using the given `private_key`. The signature is then
added to the URL, along with any required query string parameters.
For more information, see:
[V2 Signing Process](https://cloud.google.com/storage/docs/access-control/signed-urls-v2).
"""
alias Guss.{Resource, RequestHeaders, Signature}
@doc """
Sign a URL for the given `Guss.Resource` using the `private_key`.
"""
@spec sign(resource :: Guss.Resource.t(), private_key :: binary()) ::
{:ok, String.t()} | {:error, term()}
def sign(%Resource{} = resource, private_key) when is_binary(private_key) do
s2s = string_to_sign(resource)
with {:ok, signature} <- Signature.generate(s2s, private_key) do
signed_url = build_url(resource, signature)
{:ok, signed_url}
end
end
@doc """
Generates the _string to sign_ for a `Guss.Resource`.
The _string to sign_ is a canonical representation of
the request to be made with the Signed URL.
"""
@spec string_to_sign(Guss.Resource.t()) :: String.t()
def string_to_sign(%Resource{} = resource) do
headers_to_sign =
resource
|> Resource.signed_headers()
|> headers_to_sign()
http_verb = http_verb(resource.http_verb)
resource_name = resource_name(resource)
content_md5 = if is_nil(resource.content_md5), do: "", else: resource.content_md5
content_type = if is_nil(resource.content_type), do: "", else: resource.content_type
[
http_verb,
content_md5,
content_type,
Integer.to_string(resource.expires),
headers_to_sign,
resource_name
]
|> Enum.intersperse(?\n)
|> IO.iodata_to_binary()
end
defp resource_name(%{bucket: bucket, objectname: objectname}) when not is_nil(bucket) do
[?/, bucket, ?/, objectname]
end
defp resource_name(%{objectname: objectname}) do
[?/, objectname]
end
defp http_verb(method) when is_atom(method), do: http_verb(Atom.to_string(method))
defp http_verb(method) when is_binary(method), do: String.upcase(method)
defp headers_to_sign([]), do: []
defp headers_to_sign(headers) when is_list(headers) do
for {k, v} <- RequestHeaders.deduplicate(headers),
filter_extension({k, v}) do
[k, ?:, v]
end
|> Enum.intersperse(?\n)
|> List.wrap()
end
defp filter_extension({"x-goog-encryption" <> _rest, _}), do: false
defp filter_extension({"x-goog-" <> _rest, _}), do: true
defp filter_extension(_kv), do: false
defp build_url(%Guss.Resource{} = resource, signature) when is_binary(signature) do
query = resource |> build_signed_query(signature) |> URI.encode_query()
Enum.join([to_string(resource), "?", query])
end
defp build_signed_query(%Guss.Resource{account: account, expires: expires}, signature) do
%{
"GoogleAccessId" => account,
"Expires" => expires,
"Signature" => signature
}
end
end
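# Shape of the string to sign built above, illustrated with the standard
# library alone (values are made up; assumes empty content-md5/content-type
# and no signed extension headers, so those slots stay blank):
["GET", "", "", "1600000000", [], "/my-bucket/file.txt"]
|> Enum.intersperse(?\n)
|> IO.iodata_to_binary()
# => "GET\n\n\n1600000000\n\n/my-bucket/file.txt"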
# source: lib/guss/storage_v2_signer.ex
defmodule Strava.Route do
@moduledoc """
Routes are manually-created paths made up of sections called legs. Currently it is only possible to create routes using the Routebuilder web interface.
https://strava.github.io/api/v3/routes/
"""
import Strava.Util, only: [parse_timestamp: 1]
@type t :: %__MODULE__{
id: number,
resource_state: number,
name: String.t,
description: String.t,
athlete: Strava.Athlete.Summary.t,
distance: float,
elevation_gain: float,
map: Strava.Map.t,
type: number,
sub_type: number,
private: boolean,
starred: boolean,
timestamp: NaiveDateTime.t,
segments: list(Strava.Segment.t)
}
defstruct [
:id,
:resource_state,
:name,
:description,
:athlete,
:distance,
:elevation_gain,
:map,
:type,
:sub_type,
:private,
:starred,
:timestamp,
:segments,
]
@doc """
Retrieve details about a specific route.
## Example
Strava.Route.retrieve(2751038)
More info: https://strava.github.io/api/v3/routes/#retrieve
"""
@spec retrieve(integer, Strava.Client.t) :: Strava.Route.t
def retrieve(id, client \\ Strava.Client.new) do
"routes/#{id}"
|> Strava.request(client, as: %Strava.Route{})
|> parse
end
@doc """
Lists a specific athlete's routes. Private routes will only be included if the authenticating user is viewing their own routes and the token has view_private permissions.
Doesn't support pagination.
## Example
Strava.Route.list_routes(3920819)
More info: https://strava.github.io/api/v3/routes/#list
"""
@spec list_routes(integer, Strava.Client.t) :: list(Strava.Route.t)
def list_routes(athlete_id, client \\ Strava.Client.new) do
list_routes_request(athlete_id, client)
end
@spec list_routes_request(integer, Strava.Client.t) :: list(Strava.Route.t)
defp list_routes_request(athlete_id, client) do
"athletes/#{athlete_id}/routes"
|> Strava.request(client, as: [%__MODULE__{}])
|> Enum.map(&parse/1)
end
@doc """
Parse the map, dates, athlete, and segments in the route
"""
@spec parse(Strava.Route.t) :: Strava.Route.t
def parse(%Strava.Route{} = route) do
route
|> parse_map
|> parse_dates
|> parse_athlete
|> parse_segments
end
@spec parse_map(Strava.Route.t) :: Strava.Route.t
defp parse_map(%Strava.Route{map: nil} = route), do: route
defp parse_map(%Strava.Route{map: map} = route) do
%Strava.Route{route |
map: struct(Strava.Map, map)
}
end
@spec parse_dates(Strava.Route.t) :: Strava.Route.t
defp parse_dates(%Strava.Route{timestamp: timestamp} = route) do
%Strava.Route{route |
timestamp: parse_timestamp(timestamp),
}
end
@spec parse_athlete(Strava.Route.t) :: Strava.Route.t
defp parse_athlete(%Strava.Route{athlete: athlete} = route) do
%Strava.Route{route |
athlete: struct(Strava.Athlete.Summary, athlete) |> Strava.Athlete.Summary.parse()
}
end
@spec parse_segments(Strava.Route.t) :: Strava.Route.t
defp parse_segments(%Strava.Route{segments: nil} = route), do: route
defp parse_segments(%Strava.Route{segments: segments} = route) do
%Strava.Route{route |
segments: Enum.map(segments, fn segment ->
struct(Strava.Segment, segment) |> Strava.Segment.parse()
end)
}
end
end
# source: lib/strava/route.ex
defmodule Nostrum.Cache.PresenceCache.ETS do
@tablename :presences
@moduledoc """
ETS-based cache for user presences.
The ETS table name associated with the presence cache is `#{@tablename}`. Besides
the functions provided below, you can call any other ETS functions on the table.
If you need to access the name of the presence cache's table
programmatically, use the `tabname/0` function instead of hardcoding it in
your application.
## Example
```elixir
info = :ets.info(#{@tablename})
[..., heir: :none, name: #{@tablename}, size: 1, ...]
size = info[:size]
1
```
"""
@moduledoc since: "0.5"
@behaviour Nostrum.Cache.PresenceCache
alias Nostrum.Struct.{Guild, User}
import Nostrum.Snowflake, only: [is_snowflake: 1]
use Supervisor
@doc "Start the supervisor."
def start_link(init_arg) do
Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
end
@doc "Set up the cache's ETS table."
@impl Supervisor
def init(_init_arg) do
:ets.new(@tablename, [:set, :public, :named_table])
Supervisor.init([], strategy: :one_for_one)
end
@doc "Return the ETS table name used for this cache."
@spec tabname :: atom()
def tabname do
@tablename
end
@impl Nostrum.Cache.PresenceCache
@doc "Retrieves a presence for a user from the cache by guild and id."
@spec get(User.id(), Guild.id()) :: {:error, :presence_not_found} | {:ok, map}
def get(user_id, guild_id) when is_snowflake(user_id) and is_snowflake(guild_id) do
case :ets.lookup(@tablename, {user_id, guild_id}) do
[] -> {:error, :presence_not_found}
[{{^user_id, ^guild_id}, presence}] -> {:ok, presence}
end
end
@impl Nostrum.Cache.PresenceCache
@doc "Add the given presence data to the cache."
@spec create(map) :: :ok
def create(presence) do
:ets.insert(@tablename, {{presence.user.id, presence.guild_id}, presence})
:ok
end
@impl Nostrum.Cache.PresenceCache
@doc "Update the given presence data in the cache."
@spec update(map) :: {Guild.id(), nil | map, map} | :noop
def update(presence) do
case get(presence.user.id, presence.guild_id) do
{:ok, p} ->
new_presence = Map.merge(p, presence)
create(new_presence)
if p.activities == new_presence.activities and p.status == new_presence.status,
do: :noop,
else: {presence.guild_id, p, new_presence}
{:error, _} ->
create(presence)
{presence.guild_id, nil, presence}
end
end
@impl Nostrum.Cache.PresenceCache
@doc "Bulk create multiple presences in the cache."
@spec bulk_create(Guild.id(), [map]) :: :ok
def bulk_create(_, []), do: :ok
def bulk_create(guild_id, presences) when is_list(presences) do
Enum.each(presences, fn p ->
:ets.insert(@tablename, {{p.user.id, guild_id}, p})
end)
end
end
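# Round-trip sketch (IDs are made up; the supervisor above must be started
# first so the ETS table exists):
#
#     {:ok, _pid} = Nostrum.Cache.PresenceCache.ETS.start_link([])
#     presence = %{user: %{id: 1}, guild_id: 2, status: :online, activities: []}
#     :ok = Nostrum.Cache.PresenceCache.ETS.create(presence)
#     {:ok, ^presence} = Nostrum.Cache.PresenceCache.ETS.get(1, 2)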
# source: lib/nostrum/cache/presence_cache/ets.ex
defmodule Membrane.MP4.Payloader.AAC do
@moduledoc """
Payloads AAC stream so it can be embedded in MP4.
Resources:
- Packaging/Encapsulation And Setup Data section of https://wiki.multimedia.cx/index.php/Understanding_AAC
"""
use Membrane.Filter
alias Membrane.Buffer
def_input_pad :input, demand_unit: :buffers, caps: {Membrane.AAC, encapsulation: :none}
def_output_pad :output, caps: Membrane.MP4.Payload
def_options avg_bit_rate: [
type: :integer,
default: 0,
description: "Average stream bitrate. Should be set to 0 if unknown."
],
max_bit_rate: [
type: :integer,
default: 0,
description: "Maximal stream bitrate. Should be set to 0 if unknown."
]
@impl true
def handle_caps(:input, caps, _ctx, state) do
caps = %Membrane.MP4.Payload{
content: %Membrane.MP4.Payload.AAC{
esds: make_esds(caps, state),
sample_rate: caps.sample_rate,
channels: caps.channels
},
timescale: caps.sample_rate
}
{{:ok, caps: {:output, caps}}, state}
end
@impl true
def handle_process(:input, buffer, _ctx, state) do
# We set DTS=PTS, as the ISO base media file format specification uses DTS for calculating deltas
buffer = %Buffer{buffer | dts: buffer.pts}
{{:ok, buffer: {:output, buffer}}, state}
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
defp make_esds(caps, state) do
aot_id = Membrane.AAC.profile_to_aot_id(caps.profile)
frequency_id = Membrane.AAC.sample_rate_to_sampling_frequency_id(caps.sample_rate)
channel_config_id = Membrane.AAC.channels_to_channel_config_id(caps.channels)
frame_length_id = Membrane.AAC.samples_per_frame_to_frame_length_id(caps.samples_per_frame)
depends_on_core_coder = 0
extension_flag = 0
section5 =
<<aot_id::5, frequency_id::4, channel_config_id::4, frame_length_id::1,
depends_on_core_coder::1, extension_flag::1>>
|> make_esds_section(5)
# 64 = mpeg4-audio
object_type_id = 64
# 5 = audio
stream_type = 5
upstream_flag = 0
reserved_flag_set_to_1 = 1
buffer_size = 0
section4 =
<<object_type_id, stream_type::6, upstream_flag::1, reserved_flag_set_to_1::1,
buffer_size::24, state.max_bit_rate::32, state.avg_bit_rate::32, section5::binary>>
|> make_esds_section(4)
section6 = <<2>> |> make_esds_section(6)
elementary_stream_id = 1
stream_priority = 0
<<elementary_stream_id::16, stream_priority, section4::binary, section6::binary>>
|> make_esds_section(3)
end
defp make_esds_section(payload, section_no) do
type_tag = <<128, 128, 128>>
<<section_no, type_tag::binary, byte_size(payload), payload::binary>>
end
end
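# Framing used by make_esds_section/2 above, shown concretely: a one-byte
# section tag, the three-byte extended-length marker 0x80 0x80 0x80, a length
# byte, then the payload. Runnable with the standard library alone:
payload = <<2>>
<<6, 128, 128, 128, byte_size(payload), payload::binary>>
# => <<6, 128, 128, 128, 1, 2>> (section 6, which carries the SL config)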
# source: lib/membrane_mp4/payloader/aac.ex
defmodule Attrition do
@moduledoc """
Attrition provides the ability to display specific data HTML attributes
based on the configuration of your mix environment.
For example, testing and QA can be performed using the `data-qa` or `data-test` attribute,
while these attributes are effectively removed from your production markup.
Attrition accomplishes this through the use of a compile time macro that injects
overrideable functions.
If correctly configured and enabled, Attrition provided functions return
HTML attributes that can be utilized for testing, QA and beyond.
If no configuration is present, Attrition provided functions simply return
an empty string, thus obfuscating their contents in non-configured environments.
The intentional default redaction of test data and attributes reduces the risk
of scraping or accidentally exposing sensitive data.
Currently Attrition only supports the `data-qa` and `data-test`
HTML attributes.
> develop |> attrition |> deploy
## Installation
Attrition can be installed by adding `attrition` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:attrition, "~> 0.1.0"}
]
end
```
### Fetch the dependencies
```shell
mix deps.get
```
## Setup
Setup for attrition can be accomplished in two easy steps!
### 1. Environment Configuration
In the configuration file for the environment you wish to render the
data attributes, you must set the `Attrition.Reveal` module as the
value for the `:data_module` key.
For example:
```elixir
config :attrition, data_module: Attrition.Reveal
```
**Note** After updating your configuration, you must force the Attrition
dependency to recompile in order to pick up on the configuration change.
```shell
mix deps.compile attrition --force
```
The absence of a configuration, or an invalid configuration will
result in no data attributes displayed.
### 2. `Use` Attrition
Ideally, Attrition is invoked at the view definition through
the `use` macro. This allows for Attrition provided functions
to be called in both the view and template without needing to
provide an alias. This implementation provides a simple,
light-weight interface without additional cognitive load.
```elixir
# application_web.ex
def view do
quote do
...
use Attrition
end
end
```
## Usage
Once setup and configuration is complete, using Attrition
provided functions is very straightforward. These functions
can be invoked at both the view and template. All attrition provided
functions can also be overridden wherever they are used.
Example implementation of the `data_qa` function:
```elixir
<div<%= data_qa "example-count" %> class="example">
```
**NOTE**: In the example above, make note of the spacing. Ensure that
there is not a space between the element `<div` and the opening output capture
tag `<%=`. This will ensure the resulting html is formatted correctly.
Example enabled attribute output:
```html
<div data-qa="example-count" class="example">
```
Hidden attribute output:
```html
<div class="example">
```
## Testing and Developing with data attributes
### Find the data-qa attribute value using Floki
[Floki](https://hex.pm/packages/floki) is a simple HTML parser that
can quickly traverse HTML nodes. You can read more about Floki
[here](https://hexdocs.pm/floki/Floki.html).
Finding your data-qa attribute with Floki example
```elixir
{:ok, html} = Floki.parse_document(html)
Floki.find(html, "[data-qa=example-count]")
```
### View data-qa elements in the Browser
Using a browser extension, such as [data-qa Highlighter](https://chrome.google.com/webstore/detail/data-qa-highlighter/idhhdaefanknhldagkhodblcpifdddcf?hl=en)
you can easily view the elements on the page that have the data-qa attribute.

"""
@callback data_qa(value :: any()) :: String.t() | {:safe, String.t()}
@callback data_test(value :: any()) :: String.t() | {:safe, String.t()}
@data_module Application.get_env(:attrition, :data_module, Attrition.Hide)
@doc """
Injects a function definition based on the mix env
configuration module that is passed.
No arguments are given.
The macro defines helper functions once at
compile-time rather than checking for configuration
with each function call.
The functions generated are overrideable.
"""
@spec __using__([]) :: Macro.t()
defmacro __using__([]) do
quote do
@behaviour Attrition
@impl Attrition
def data_qa(value) do
Attrition.data_module().data_qa(value)
end
@impl Attrition
def data_test(value) do
Attrition.data_module().data_test(value)
end
defoverridable Attrition
end
end
@spec data_module :: atom()
def data_module, do: @data_module
defmodule Reveal do
@moduledoc """
`Attrition.Reveal` returns the "real" versions of data functions,
revealing the passed value contents to your markup.
This module is for configured environments only.
"""
@type safe_string :: {:safe, String.t()}
@doc """
Returns :safe HTML string that has interior quotes of interpolated
string escaped with prepended whitespace padding.
"""
@spec data_qa(String.t()) :: safe_string()
def data_qa(string) when is_binary(string), do: {:safe, ~s( data-qa="#{string}")}
@doc """
Returns :safe HTML string that has interior quotes of interpolated
string escaped with prepended whitespace padding.
"""
@spec data_test(String.t()) :: safe_string()
def data_test(string) when is_binary(string), do: {:safe, ~s( data-test="#{string}")}
end
defmodule Hide do
@moduledoc """
`Attrition.Hide` returns the noop versions of data functions, essentially
"hiding" them by returning an empty string into markup.
This is the default module for unconfigured or misconfigured
environments, preventing sensitive data from leaking into production
inadvertently.
"""
@doc """
Returns empty string regardless of argument.
"""
@spec data_qa(any()) :: String.t()
def data_qa(_), do: ""
@doc """
Returns empty string regardless of argument.
"""
@spec data_test(any()) :: String.t()
def data_test(_), do: ""
end
end
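# Override sketch (module name is made up): the injected functions are declared
# overridable, so a view can supply its own variant locally.
defmodule MyAppWeb.ExampleView do
  use Attrition

  @impl Attrition
  def data_qa(value), do: {:safe, ~s( data-qa="custom-#{value}")}
end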
# source: lib/attrition.ex
defmodule Plug.Cowboy.Drainer do
@moduledoc """
Process to drain cowboy connections at shutdown.
When starting `Plug.Cowboy` in a supervision tree, it will create a listener that receives
requests and creates a connection process to handle that request. During shutdown, a
`Plug.Cowboy` process will immediately exit, closing the listener and any open connections
that are still being served. In most cases, it is desirable to allow connections to
complete before shutting down.
This module provides a process that during shutdown will close listeners and wait
for connections to complete. It should be placed after other supervised processes that
handle cowboy connections.
## Options
The following options can be given to the child spec:
* `:refs` - A list of refs to drain. `:all` is also supported and will drain all cowboy
listeners, including those started by means other than `Plug.Cowboy`.
* `:id` - The ID for the process.
Defaults to `Plug.Cowboy.Drainer`.
* `:shutdown` - How long to wait for connections to drain.
Defaults to 5000ms.
* `:drain_check_interval` - How frequently to check if a listener's
connections have been drained.
Defaults to 1000ms.
## Examples
# In your application
def start(_type, _args) do
children = [
{Plug.Cowboy, scheme: :http, plug: MyApp, options: [port: 4040]},
{Plug.Cowboy, scheme: :https, plug: MyApp, options: [port: 4041]},
{Plug.Cowboy.Drainer, refs: [MyApp.HTTP, MyApp.HTTPS]}
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
"""
use GenServer
@doc false
@spec child_spec(opts :: Keyword.t()) :: Supervisor.child_spec()
def child_spec(opts) when is_list(opts) do
{spec_opts, opts} = Keyword.split(opts, [:id, :shutdown])
Supervisor.child_spec(
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :worker
},
spec_opts
)
end
@doc false
def start_link(opts) do
opts
|> Keyword.fetch!(:refs)
|> validate_refs!()
GenServer.start_link(__MODULE__, opts)
end
@doc false
@impl true
def init(opts) do
Process.flag(:trap_exit, true)
{:ok, opts}
end
@doc false
@impl true
def terminate(_reason, opts) do
opts
|> Keyword.fetch!(:refs)
|> drain(Keyword.get(opts, :drain_check_interval, 1_000))
end
defp drain(:all, drain_check_interval) do
:ranch.info()
|> Enum.map(&elem(&1, 0))
|> drain(drain_check_interval)
end
defp drain(refs, drain_check_interval) do
refs
|> Enum.filter(&suspend_listener/1)
|> Enum.each(&wait_for_connections(&1, drain_check_interval))
end
defp suspend_listener(ref) do
:ranch.suspend_listener(ref) == :ok
end
defp wait_for_connections(ref, drain_check_interval) do
:ranch.wait_for_connections(ref, :==, 0, drain_check_interval)
end
defp validate_refs!(:all), do: :ok
defp validate_refs!(refs) when is_list(refs), do: :ok
defp validate_refs!(refs) do
raise ArgumentError,
":refs should be :all or a list of references, got: #{inspect(refs)}"
end
end
# source: lib/plug/cowboy/drainer.ex
defmodule Paasaa do
@moduledoc """
Provides language detection functions
## Examples
iex> Paasaa.detect "Detect this!"
"eng"
"""
@scripts Paasaa.Data.scripts()
@languages Paasaa.Data.languages()
@max_difference 300
@type result :: [{language :: String.t(), score :: number}]
@type options :: [
min_length: integer,
max_length: integer,
whitelist: [String.t()],
blacklist: [String.t()]
]
@default_options [
min_length: 10,
max_length: 2048,
whitelist: [],
blacklist: []
]
@und [{"und", 1}]
@doc """
Detects a language. Returns a string with the ISO 639-3 language code (e.g. "eng").
## Parameters
- `str` - a text string
- `options` - a keyword list with options:
- `:min_length` - If the text is shorter than `:min_length` it will return `und`. Default: `10`.
- `:max_length` - Maximum length to analyze. Default: `2048`.
- `:whitelist` - Allow languages. Default: `[]`.
- `:blacklist` - Disallow languages. Default: `[]`.
## Examples
Detect a string:
iex> Paasaa.detect "Detect this!"
"eng"
With the `:blacklist` option:
iex> Paasaa.detect "Detect this!", blacklist: ["eng"]
"sco"
With the `:min_length` option:
iex> Paasaa.detect "Привет", min_length: 6
"rus"
It returns `und` for undetermined language:
iex> Paasaa.detect "1234567890"
"und"
"""
@spec detect(str :: String.t(), options) :: language :: String.t()
def detect(str, options \\ @default_options) do
str
|> all(options)
|> List.first()
|> elem(0)
end
@doc """
Detects a language. Returns a list of languages scored by probability.
## Parameters
- `str` - a text string
- `options` - a keyword list with options, see `detect/2` for details.
## Examples
Detect language and limit results to 5:
iex> Paasaa.all("Detect this!") |> Enum.take(5)
[
{"eng", 1.0},
{"sco", 0.8675529295913343},
{"nob", 0.6065977351058591},
{"swe", 0.5923190546528804},
{"nno", 0.5534219596258001}
]
"""
@spec all(str :: String.t(), options) :: result
def all(str, options \\ @default_options)
def all("", _), do: @und
def all(nil, _), do: @und
def all(str, options) do
options = Keyword.merge(@default_options, options)
if String.length(str) < options[:min_length] do
@und
else
process(str, options)
end
end
@spec process(str :: String.t(), options) :: result
defp process(str, options) do
str = String.slice(str, 0, options[:max_length])
{script, count} = detect_script(str)
cond do
count == 0 ->
@und
Map.has_key?(@languages, script) ->
str
|> get_clean_trigrams
|> get_distances(@languages[script], options)
|> normalize(str)
true ->
if allowed?(script, options) do
[{script, 1}]
else
@und
end
end
end
defp allowed?(lang, options) do
white = options[:whitelist]
black = options[:blacklist]
(Enum.empty?(white) || Enum.member?(white, lang)) && !Enum.member?(black, lang)
end
@doc """
Detects a script.
## Parameters
- `str` - a text string
## Examples
iex> Paasaa.detect_script("Detect this!")
{"Latin", 0.8333333333333334}
"""
@spec detect_script(str :: String.t()) :: {String.t(), number}
def detect_script(str) do
len = String.length(str)
@scripts
|> Enum.map(fn {name, re} -> {name, get_occurrence(str, re, len)} end)
|> Enum.max_by(fn {_, count} -> count end)
end
@spec get_occurrence(str :: String.t(), re :: Regex.t(), str_len :: non_neg_integer) :: float
defp get_occurrence(str, re, str_len) do
Enum.count(Regex.scan(re, str)) / str_len
end
@spec get_distances([String.t()], Enumerable.t(), options) :: result
defp get_distances(trigrams, languages, options) do
languages
|> filter_languages(options)
|> Enum.map(fn {lang, model} -> {lang, get_distance(trigrams, model)} end)
|> Enum.sort(&(elem(&1, 1) < elem(&2, 1)))
end
@spec get_distance([String.t()], Enumerable.t()) :: number
defp get_distance(trigrams, model) do
Enum.reduce(trigrams, 0, fn {name, val}, distance ->
distance +
if Map.has_key?(model, name) do
abs(val - model[name] - 1)
else
@max_difference
end
end)
end
@spec filter_languages([String.t()], Enumerable.t()) :: Enumerable.t()
defp filter_languages(languages, options) do
white = options[:whitelist]
black = options[:blacklist]
if Enum.empty?(white) && Enum.empty?(black) do
languages
else
Enum.filter(languages, fn {lang, _} ->
allowed?(lang, options)
end)
end
end
@spec normalize(result, String.t()) :: result
defp normalize([], _str), do: @und
defp normalize(distances, str) do
min = distances |> List.first() |> elem(1)
max = String.length(str) * @max_difference - min
Enum.map(distances, fn {lang, dist} ->
dist = if max == 0, do: 0, else: 1 - (dist - min) / max
{lang, dist}
end)
end
# trigram stuff
@spec get_clean_trigrams(String.t()) :: result
defp get_clean_trigrams(str) do
str
|> clean
|> pad
|> n_grams
|> Enum.reduce(%{}, fn trigram, acc ->
count = (acc[trigram] && acc[trigram] + 1) || 1
Map.put(acc, trigram, count)
end)
|> Map.to_list()
end
@spec clean(str :: String.t()) :: String.t()
defp clean(str) do
expression_symbols = ~r/[\x{0021}-\x{0040}]+/u
str
|> String.replace(expression_symbols, " ")
|> String.replace(~r/\s+/, " ")
|> String.trim()
|> String.downcase()
end
defp pad(str), do: " #{str} "
@spec n_grams(str :: String.t(), n :: number) :: [String.t()]
defp n_grams(str, n \\ 3) do
str
|> String.graphemes()
|> Enum.chunk_every(n, 1, :discard)
|> Enum.map(&Enum.join/1)
end
end
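# What the trigram extraction above produces, shown with the standard library
# alone (clean/1 also lower-cases and strips punctuation before padding):
" hi "
|> String.graphemes()
|> Enum.chunk_every(3, 1, :discard)
|> Enum.map(&Enum.join/1)
# => [" hi", "hi "]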
# source: lib/paasaa.ex
defmodule Scenic.Battery.Monitor do
@moduledoc """
A component that displays battery charge in four segments, including empty.
"""
use Scenic.Component, has_children: false
alias Scenic.Graph
alias Scenic.Primitive.Style.Theme
import Scenic.Primitives
@default_theme :dark
@default_data %{charge_level: 3, charging: false}
def verify(nil), do: {:ok, nil}
def verify(data) when is_map(data) do
%{charge_level: charge_level, charging: charging} = merge_defaults(data)
verify_charge_level(charge_level)
verify_charging(charging)
{:ok, data}
end
def verify(_), do: :invalid_data
def init(data, opts) do
%{charge_level: charge_level, charging: charging} = merge_defaults(data)
styles = opts[:styles]
# theme is passed in as an inherited style
theme =
(styles[:theme] || Theme.preset(@default_theme))
|> Theme.normalize()
graph =
Graph.build()
# Background inside the battery
|> rect({59, 33},
fill: theme.background
)
# Battery border
|> rect({59, 33},
stroke: {2, theme.border}
)
# Positive terminal
|> rect({8, 10},
stroke: {2, theme.border},
fill: theme.border,
t: {60, 11}
)
# Charge segments, low to high
|> rect({17, 27},
id: :charge_1,
hidden: charge_level < 1,
stroke: {1, theme.background},
fill: theme.border,
t: {3, 3}
)
|> rect({17, 27},
id: :charge_2,
hidden: charge_level < 2,
stroke: {1, theme.background},
fill: theme.border,
t: {21, 3}
)
|> rect({17, 27},
id: :charge_3,
hidden: charge_level < 3,
stroke: {1, theme.background},
fill: theme.border,
t: {39, 3}
)
# Charging symbol
|> path(
[
{:move_to, 36, 5},
{:line_to, 30, 15},
{:line_to, 43, 15},
{:line_to, 22, 29},
{:line_to, 26, 18},
{:line_to, 16, 18},
:close_path
],
id: :charging_symbol,
stroke: {1, theme.background},
fill: theme.border,
hidden: !charging
)
|> push_graph()
{:ok, %{graph: graph}}
end
def handle_info({:charge_level, charge}, state) when charge in 0..3 do
charge_values = %{
charge_1: charge < 1,
charge_2: charge < 2,
charge_3: charge < 3
}
new_graph =
Enum.reduce(charge_values, state[:graph], fn {id, charged}, acc ->
Graph.modify(acc, id, &update_opts(&1, hidden: charged))
end)
|> push_graph()
{:noreply, %{state | graph: new_graph}}
end
def handle_info({:charging, charging}, state) when is_boolean(charging) do
new_graph =
Graph.modify(state[:graph], :charging_symbol, &update_opts(&1, hidden: !charging))
|> push_graph()
{:noreply, %{state | graph: new_graph}}
end
## Internal
defp merge_defaults(nil), do: @default_data
defp merge_defaults(map) do
Map.merge(@default_data, map, fn _key, default, val -> val || default end)
end
defp verify_charge_level(charge_level) when charge_level in 0..3 do
:ok
end
defp verify_charge_level(bad_charge_level) do
raise ":charge_level must be an integer in 0..3. Received #{inspect(bad_charge_level)}"
end
defp verify_charging(charging) when is_boolean(charging) do
:ok
end
defp verify_charging(_) do
raise ":charging must be a boolean"
end
end
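# Update sketch (monitor_pid is hypothetical, obtained from the running scene):
# charge level and charging state are driven by the plain messages handled in
# handle_info/2 above.
#
#     send(monitor_pid, {:charge_level, 2})
#     send(monitor_pid, {:charging, true})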
# source: lib/monitor.ex
defmodule Dispenser.Buffer do
@moduledoc """
A `Dispenser.Buffer` is a buffer that manages incoming events and demand for those events.
"""
alias Dispenser.{Demands, AssignmentStrategy}
alias LimitedQueue
@typedoc """
The opaque internal state of the `Buffer`.
"""
@opaque t(event, subscriber) :: %__MODULE__{
events: LimitedQueue.t(event),
demands: Demands.t(subscriber),
assignment_strategy: AssignmentStrategy.t()
}
@typedoc """
Various statistics exposed by the `Buffer` for use by debugging and metrics.
See `stats/1`
"""
@type stats() :: %{
buffered: non_neg_integer(),
demand: non_neg_integer()
}
@enforce_keys [:events, :demands, :assignment_strategy]
defstruct [:events, :demands, :assignment_strategy]
@doc """
Create a new `Buffer` with a maximum capacity.
"""
@spec new(AssignmentStrategy.t(), pos_integer(), LimitedQueue.drop_strategy()) ::
t(event, subscriber)
when event: any(), subscriber: any()
def new(assignment_strategy, capacity, drop_strategy) when capacity > 0 do
%__MODULE__{
events: LimitedQueue.new(capacity, drop_strategy),
demands: Demands.new(),
assignment_strategy: assignment_strategy
}
end
@doc """
Add events to the `Buffer`.
If the `Buffer` reaches its capacity, events will be dropped.
"""
@spec append(t(event, subscriber), [event]) ::
{t(event, subscriber), dropped :: non_neg_integer()}
when event: any(), subscriber: any()
def append(%__MODULE__{} = state, []) do
{state, 0}
end
def append(%__MODULE__{} = state, events) when is_list(events) do
{events, dropped} = LimitedQueue.append(state.events, events)
state = %__MODULE__{state | events: events}
{state, dropped}
end
@doc """
Ask for events from the `Buffer`.
These demands are met by calls to `assign_events/1`
"""
@spec ask(t(event, subscriber), subscriber, non_neg_integer()) :: t(event, subscriber)
when event: any(), subscriber: any()
def ask(%__MODULE__{} = state, _subscriber, 0) do
state
end
def ask(%__MODULE__{} = state, subscriber, demand) when demand > 0 do
demands = Demands.add(state.demands, subscriber, demand)
%__MODULE__{state | demands: demands}
end
@doc """
Given the current events and demands, returns the events to send to each subscriber.
"""
@spec assign_events(t(event, subscriber)) :: {t(event, subscriber), [{subscriber, [event]}]}
when event: any(), subscriber: any()
def assign_events(%__MODULE__{} = state) do
event_count = LimitedQueue.size(state.events)
total_demand = Demands.total(state.demands)
if event_count == 0 or total_demand == 0 do
{state, []}
else
{demands_to_meet, remaining_demands} =
state.assignment_strategy.assign(state.demands, event_count)
{remaining_events, assignments} = create_assignments(state.events, demands_to_meet)
state = %__MODULE__{state | demands: remaining_demands, events: remaining_events}
{state, assignments}
end
end
@doc """
Remove all demand from the given subscriber.
"""
@spec delete(t(event, subscriber), subscriber) :: t(event, subscriber)
when event: any(), subscriber: any()
def delete(%__MODULE__{} = state, subscriber) do
demands = Demands.delete(state.demands, subscriber)
%__MODULE__{state | demands: demands}
end
@doc """
Get the number of events in the `Buffer`.
"""
@spec size(t(event, subscriber)) :: non_neg_integer() when event: any(), subscriber: any()
def size(%__MODULE__{} = state) do
LimitedQueue.size(state.events)
end
@doc """
Get various statistics about the `Buffer` for use when debugging and generating metrics.
"""
@spec stats(t(event, subscriber)) :: stats() when event: any(), subscriber: any()
def stats(%__MODULE__{} = state) do
%{
buffered: LimitedQueue.size(state.events),
demand: Demands.total(state.demands)
}
end
@spec create_assignments(
events :: LimitedQueue.t(event),
demands :: Demands.t(subscriber)
) :: {remaining_events :: [event], assignments :: [{subscriber, [event]}]}
when event: any(), subscriber: any()
defp create_assignments(events, demands) do
demands
|> Demands.subscribers()
|> Enum.reduce({events, []}, fn {subscriber, demand}, {remaining_events, assignments} ->
{remaining_events, events_to_send} = LimitedQueue.split(remaining_events, demand)
assignments = [{subscriber, events_to_send} | assignments]
{remaining_events, assignments}
end)
end
end
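# Flow sketch (MyAssignmentStrategy and the :drop strategy atom are
# assumptions; substitute the implementations your application provides):
#
#     buffer = Dispenser.Buffer.new(MyAssignmentStrategy, 1_000, :drop)
#     {buffer, _dropped} = Dispenser.Buffer.append(buffer, [:a, :b, :c])
#     buffer = Dispenser.Buffer.ask(buffer, self(), 2)
#     {_buffer, assignments} = Dispenser.Buffer.assign_events(buffer)
#     # assignments :: [{subscriber, [event]}], e.g. [{self(), [:a, :b]}]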
# source: lib/dispenser/buffer.ex
defmodule Ergo.Numeric do
alias Ergo.{Context, Parser}
import Ergo.{Terminals, Combinators}
@moduledoc """
The Numeric module houses utility parsers that, while terminals in the sense that they are not parameterised, internally make use of parsers from the Combinators module.
# Parsers
* `uint`
* `decimal`
* `digits`
"""
@doc ~S"""
The `uint` parser matches a series of at least one digit and returns the
integer value of the digits.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Numeric
iex> context = Ergo.parse(uint(), "2345")
iex> assert %Context{status: :ok, ast: 2345, index: 4, col: 5} = context
"""
def uint(opts \\ []) do
label = Keyword.get(opts, :label, "uint")
parser = digits()
Parser.terminal(
:uint,
label,
fn %Context{} = ctx ->
with %Context{status: :ok, ast: ast} = new_ctx <- Parser.invoke(ctx, parser) do
uint_value = ast |> Enum.join("") |> String.to_integer()
%{new_ctx | ast: uint_value}
end
end
)
end
@doc ~S"""
## Examples
iex> alias Ergo.Context
iex> import Ergo.Numeric
iex> context = Ergo.parse(decimal(), "234.56")
iex> assert %Context{status: :ok, ast: 234.56} = context
"""
def decimal(opts \\ []) do
label = Keyword.get(opts, :label, "decimal")
parser = sequence([digits(), ignore(char(?.)), digits()], label: "ddd.dd")
Parser.terminal(
:decimal,
label,
fn %Context{} = ctx ->
with %Context{status: :ok, ast: ast} = new_ctx <- Parser.invoke(ctx, parser) do
[i_part | [d_part]] = ast
i_val = i_part |> Enum.join("")
d_val = d_part |> Enum.join("")
%{new_ctx | ast: String.to_float("#{i_val}.#{d_val}")}
end
end
)
end
@doc ~S"""
The `digits` parser matches a series of at least one digit and returns an enumeration
of the digits.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Numeric
iex> context = Ergo.parse(digits(), "2345")
iex> assert %Context{status: :ok, ast: [2, 3, 4, 5]} = context
"""
def digits(opts \\ []) do
label = Keyword.get(opts, :label, "digits")
many(digit(),
label: label,
min: 1,
ast: fn digits -> Enum.map(digits, fn digit -> digit - ?0 end) end
)
end
@doc ~S"""
The `number` parser matches both integer and decimal string and converts them into their
appropriate Elixir integer or float values.
## Examples
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: 42} = Ergo.parse(number(), "42")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: -42} = Ergo.parse(number(), "-42")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: 42.0} = Ergo.parse(number(), "42.0")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: -42.0} = Ergo.parse(number(), "-42.0")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: 0} = Ergo.parse(number(), "0")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: 0} = Ergo.parse(number(), "0000")
iex> import Ergo.Numeric
iex> assert %{status: {:error, _}} = Ergo.parse(number(), "Fourty Two")
iex> import Ergo.Numeric
iex> assert %{status: :ok, ast: 42} = Ergo.parse(number(), "42Fourty Two")
"""
def number(opts \\ []) do
label = Keyword.get(opts, :label, "number")
sequence(
[
optional(char(?-), ast: fn _ -> -1 end, label: "?-"),
choice(
[
decimal(),
uint()
],
label: "int|dec"
)
],
label: label,
ast: &Enum.product/1
)
end
end
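# Note on the number parser above: the optional leading ?- is mapped to -1 and
# the sequence AST is collapsed with Enum.product/1, so a sign flips the value
# and its absence leaves it unchanged. Standard-library illustration:
Enum.product([-1, 42]) # => -42 (minus sign matched)
Enum.product([42])     # => 42  (minus sign absent)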
# source: lib/ergo/numeric.ex
defmodule Anaphora do
@vsn "0.1.2"
@moduledoc """
The anaphoric macro collection for Elixir
"""
defmacro __using__(_) do
quote do
import Anaphora
end
end
## Returns the `it` variable defined in user context
defp it do
Macro.var(:it, nil)
end
@doc """
Binds the `expression` to `it` (via `case`) in the scope of the `body`
## Examples
iex> Anaphora.alet 2 * 2 + 2, do: it / 2
3.0
iex> Anaphora.alet tl([1, 2, 3]) do
...> hd(it) # do some staff
...> tl(it)
...> end
[3]
"""
defmacro alet(expression, do: body) do
quote do
case unquote(expression) do
unquote(it) -> unquote(body)
end
end
end
@doc """
Works like `if`, except that result of the `condition` is bound to `it` (via `alet`) for the
scope of the then and else `clauses`
## Examples
iex> Anaphora.aif :aif_test, do: it
:aif_test
iex> Anaphora.aif 2 * 2 + 2 do
...> it / 2
...> else
...> :never
...> end
3.0
iex> Anaphora.aif 1 == 2, do: :never, else: it
false
"""
defmacro aif(condition, clauses) do
quote do
Anaphora.alet unquote(condition) do
if(unquote(it), unquote(clauses))
end
end
end
@doc """
Works like `cond`, except that result of each `condition` is bound to `it` (via `alet`) for the
scope of the corresponding `body`
## Examples
iex> Anaphora.acond do
...> :acond_test -> it
...> end
:acond_test
iex> Anaphora.acond do
...> 1 + 2 == 4 -> :never
...> false -> :never
...> 2 * 2 + 2 ->
...> it * 2 # do some stuff
...> it / 2
...> true -> :never
...> end
3.0
iex> Anaphora.acond do
...> false -> :never
...> end
nil
"""
defmacro acond(clauses)
defmacro acond(do: []), do: nil
defmacro acond(do: clauses) do
clauses |> Enum.reverse |> Enum.reduce(nil, &expand_acond_clause/2)
end
defp expand_acond_clause({:->, _c, [[condition], then_body]}, else_body) do
quote do
Anaphora.aif unquote(condition), do: unquote(then_body), else: unquote(else_body)
end
end
@doc """
Works like `case`, except that result of the `key` expression is bound to `it` (via `alet`) for the
scope of the `cases`
## Examples
iex> Anaphora.acase :acase_test do
...> :acase_test -> it
...> end
:acase_test
iex> Anaphora.acase [1, 2, 3] do
...> {a, b, c} -> :never
...> [1 | tale] -> it -- tale
...> _ -> :never
...> end
[1]
iex> try do
...> Anaphora.acase true do
...> false -> :never
...> end
...> rescue
...> _e in CaseClauseError -> :error
...> end
:error
"""
defmacro acase(key, do: cases) do
quote do
Anaphora.alet unquote(key) do
case unquote(it) do
unquote(cases)
end
end
end
end
@doc """
Evaluates each `clause` one at a time and binds the result to `it`. As soon as any `clause`
evaluates to `nil` (or `false`), `aand` returns `nil` without evaluating the remaining
`clauses`. If all `clauses` but the last evaluate to true values, `aand` returns the
results produced by evaluating the last `clause`
## Examples
iex> Anaphora.aand do
...> end
true
iex> Anaphora.aand do
...> :aand_test
...> end
:aand_test
iex> Anaphora.aand do
...> 2 + 3
...> 1 + it + 4
...> it * 20
...> end
200
iex> Anaphora.aand do
...> 1 == 2
...> !it
...> end
nil
"""
defmacro aand(clauses)
defmacro aand(do: nil), do: true
defmacro aand(do: {:__block__, _c, clauses}) do
clauses |> Enum.reverse |> Enum.reduce(&expand_aand_clause/2)
end
defmacro aand(do: expression), do: expression
defp expand_aand_clause(clause, body) do
quote do
Anaphora.aif unquote(clause), do: unquote(body)
end
end
@doc """
Works like `fn`, except that anonymous function is bound to `it` (via `blood magic`)
## Examples
iex> fact = Anaphora.afn do
...> 0 -> 1
...> 1 -> 1
...> n when n > 0 -> n * it.(n - 1)
...> end
...> fact.(5)
120
iex> fib = Anaphora.afn do
...> 0 -> 1
...> 1 -> 1
...> n when n > 0 -> it.(n - 1) + it.(n - 2)
...> end
...> Enum.map(1..7, fib)
[1, 2, 3, 5, 8, 13, 21]
iex> (Anaphora.afn do
...> x, y when x > 0 -> x + it.(x - 1, y)
...> 0, y when y > 0 -> y + it.(0, y - 1)
...> 0, 0 -> 0
...> end).(2, 4)
13
"""
defmacro afn(do: definitions) do
ys = generate_z_combinator_ys(hd(definitions))
# λx.f (λys.((x x) ys))
lambda_x =
quote do: fn x -> f.(fn unquote_splicing(ys) -> (x.(x)).(unquote_splicing(ys)) end) end
lambda_f =
quote do: fn f -> (unquote(lambda_x)).(unquote(lambda_x)) end
lambda_it =
quote do: fn unquote(it) -> unquote({:fn, [], definitions}) end
quote do: (unquote(lambda_f)).(unquote(lambda_it))
end
defp generate_z_combinator_ys({:->, _c, [arguments, _body]}) do
1..number_of_afn_arguments(arguments)
|> Enum.map(&(Macro.var(String.to_atom("y#{&1}"), __MODULE__)))
end
defp number_of_afn_arguments([{:when, _c, arguments}]), do: Enum.count(arguments) - 1
defp number_of_afn_arguments(arguments), do: Enum.count(arguments)
end
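# Expansion sketch: alet/2 above rewrites to a single-clause case whose sole
# pattern is the bare `it` variable, which is how the binding reaches the body:
#
#     case 2 * 2 + 2 do
#       it -> it / 2
#     end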
# source: lib/anaphora.ex
defmodule PingPlug do
@moduledoc """
PingPlug, a plug to echo a defined message.
PingPlug can be used in two ways, execute a function or echo a message.
#### Execute a function
To execute any function you must define a module function that returns:
{:ok, String.t()} | {:error, String.t()}
In which,
* `:ok` will be mapped to HTTP 200, and
* `:error` will be mapped to HTTP 500
A simple use case is to use it as a liveness probe. For example, configure
PingPlug to match a specific path and execute dependency checks.
## Examples
Here is an example of returning a value when a request hits
`/_checks/liveness` path.
```
# endpoint.ex
plug PingPlug, path: ["_checks", "liveness"], return: "alive!"
```
Here is an example of executing a function to check database connectivity.
First, define a module that sends a query to a database to ensure it is up and
running.
```
# db_check.ex
defmodule DBCheck do
def check do
case Ecto.Adapters.SQL.query(Repo, "SELECT 1") do
{:ok, _result} ->
{:ok, "ready!"}
{:error, exception} ->
{:error, Exception.message(exception)}
end
end
end
```
Then plug it into the readiness endpoint.
```
# endpoint.ex
plug PingPlug, path: ["_checks", "readiness"], execute: {DBCheck, :check, []}
```
"""
@type execute :: {module(), function_name :: atom(), [any()]}
@type options :: [{:path, nonempty_list()}, {:return, String.t()}, {:execute, execute()}]
import Plug.Conn
@spec init(options()) :: options()
def init([{:path, [_ | _]} | _] = options) do
options
end
@spec call(Plug.Conn.t(), options()) :: Plug.Conn.t()
def call(%Plug.Conn{path_info: path_info} = conn, [{:path, path}, {:return, message}]) do
if path_info == path do
conn
|> resp(:ok, message)
|> halt()
else
conn
end
end
def call(%Plug.Conn{path_info: path_info} = conn, [
{:path, path},
{:execute, {mod, function_name, args}}
]) do
if path_info == path do
case apply(mod, function_name, args) do
{:ok, message} ->
conn
|> resp(:ok, message)
|> halt()
{:error, message} ->
conn
|> resp(:internal_server_error, message)
|> halt()
end
else
conn
end
end
end
# source: lib/ping_plug.ex
defmodule Advent.Y2020.D12 do
@moduledoc """
https://adventofcode.com/2020/day/12
"""
def part_one(input) do
input
|> Stream.map(&split_instruction/1)
|> Enum.reduce({0, 0, 90}, fn
# Move North
{"N", value}, {x, y, deg} -> {x, y + value, deg}
{"F", value}, {x, y, 0} -> {x, y + value, 0}
# Move South
{"S", value}, {x, y, deg} -> {x, y - value, deg}
{"F", value}, {x, y, 180} -> {x, y - value, 180}
# Move East
{"E", value}, {x, y, deg} -> {x + value, y, deg}
{"F", value}, {x, y, 90} -> {x + value, y, 90}
# Move West
{"W", value}, {x, y, deg} -> {x - value, y, deg}
{"F", value}, {x, y, 270} -> {x - value, y, 270}
# Rotate
{"R", value}, {x, y, deg} -> {x, y, rem(deg + value, 360)}
{"L", value}, {x, y, deg} -> {x, y, rem(360 + deg - value, 360)}
end)
|> (fn {x, y, _} -> manhattan_distance({x, y}) end).()
end
def part_two(input) do
input
|> Stream.map(&split_instruction/1)
|> Enum.reduce({{0, 0}, {10, 1}}, fn
# Move waypoint
{"N", value}, {coords, {x, y}} -> {coords, {x, y + value}}
{"S", value}, {coords, {x, y}} -> {coords, {x, y - value}}
{"E", value}, {coords, {x, y}} -> {coords, {x + value, y}}
{"W", value}, {coords, {x, y}} -> {coords, {x - value, y}}
# Rotate waypoint (https://en.wikipedia.org/wiki/Rotation_matrix#Common_rotations)
# 90 clockwise
{"R", 90}, {coords, {x, y}} -> {coords, {y, -x}}
{"L", 270}, {coords, {x, y}} -> {coords, {y, -x}}
# 180
{"R", 180}, {coords, {x, y}} -> {coords, {-x, -y}}
{"L", 180}, {coords, {x, y}} -> {coords, {-x, -y}}
# 90 counter-clockwise
{"R", 270}, {coords, {x, y}} -> {coords, {-y, x}}
{"L", 90}, {coords, {x, y}} -> {coords, {-y, x}}
# Move plane
{"F", value}, {{px, py}, {wx, wy}} -> {{px + wx * value, py + wy * value}, {wx, wy}}
end)
|> (fn {coords, _} -> manhattan_distance(coords) end).()
end
defp split_instruction(<<action::binary-size(1), value::binary>>) do
{action, String.to_integer(value)}
end
defp manhattan_distance({x, y}) do
abs(x) + abs(y)
end
end
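# Quick check of the clockwise-90 rotation used in part_two/1: the common
# rotation matrix sends (x, y) to (y, -x). Runnable with the standard library:
rotate_r90 = fn {x, y} -> {y, -x} end
rotate_r90.({10, 1}) # => {1, -10}: the initial waypoint after one "R90"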
# source: lib/advent/y2020/d12.ex
defmodule FloUI.Scrollable.Container do
@moduledoc """
## Usage in SnapFramework
Scrollable container is used for large content that you want to scroll. It renders a child component within.
This is meant to be as plug-and-play as possible, with minimal fiddling required to get it working.
You can use the ScrollBar component directly if you want to build your own scrollable containers.
data is a map in the form of
``` elixir
%{
frame: {460, 470},
content: {800, 800}
}
```
You can choose which scroll bars to render via the `scroll_bars` option.
``` elixir
<%= graph font_size: 20 %>
<%= component FloUI.Scrollable.Container,
%{
frame: {460, 470},
content: {800, 800}
},
translate: {20, 60},
scroll_bars: %{
vertical: %{
show: true,
show_buttons: true,
theme: Scenic.Primitive.Style.Theme.preset(:dark),
thickness: 15
},
horizontal: %{
show: true,
show_buttons: true,
theme: Scenic.Primitive.Style.Theme.preset(:dark),
thickness: 15
}
} do %>
<%= component Basic.Component.Page4, nil %>
<% end %>
```
"""
alias Scenic.Graph
alias Scenic.Math.Vector2
alias FloUI.Scrollable.Hotkeys
alias FloUI.Scrollable.Direction
alias FloUI.Scrollable.Acceleration
alias FloUI.Scrollable.PositionCap
use SnapFramework.Component,
name: :scrollable_container,
template: "lib/scrollable/container.eex",
controller: FloUI.Scrollable.ScrollableContainerController,
assigns: [],
opts: []
defcomponent(:scrollable_container, :map)
@default_horizontal_scroll_bar %{
show: false,
show_buttons: false,
thickness: 15,
radius: 3,
theme: FloUI.Theme.preset(:scrollbar)
}
@default_vertical_scroll_bar %{
show: true,
show_buttons: true,
thickness: 15,
radius: 3,
theme: FloUI.Theme.preset(:scrollbar)
}
@default_position {0, 0}
@default_fps 30
use_effect([assigns: [scroll_position: :any]],
run: [:on_scroll_position_change]
)
@impl true
def setup(%{assigns: %{data: data, opts: opts}} = scene) do
{content_width, content_height} = data.content
{frame_width, frame_height} = data.frame
{frame_x, frame_y} = opts[:translate] || @default_position
scroll_position = Map.get(data, :scroll_position, {0, 0})
scroll_bars =
case opts[:scroll_bars] do
nil ->
%{vertical: @default_vertical_scroll_bar, horizontal: @default_horizontal_scroll_bar}
scroll_bars ->
vertical = Map.get(scroll_bars, :vertical, @default_vertical_scroll_bar)
horizontal = Map.get(scroll_bars, :horizontal, @default_horizontal_scroll_bar)
%{
vertical: %{
show: Map.get(vertical, :show, true),
show_buttons: Map.get(vertical, :show_buttons, true),
thickness: Map.get(vertical, :thickness, 15),
radius: Map.get(vertical, :radius, 3),
theme: Map.get(vertical, :theme, FloUI.Theme.preset(:scrollbar))
},
horizontal: %{
show: Map.get(horizontal, :show, true),
show_buttons: Map.get(horizontal, :show_buttons, true),
thickness: Map.get(horizontal, :thickness, 15),
radius: Map.get(horizontal, :radius, 3),
theme: Map.get(horizontal, :theme, FloUI.Theme.preset(:scrollbar))
}
}
end
assign(scene,
id: opts[:id] || :scrollable,
theme: opts[:theme] || FloUI.Theme.preset(:scrollbar),
frame: %{x: frame_x, y: frame_y, width: frame_width, height: frame_height},
content: %{x: 0, y: 0, width: content_width, height: content_height},
scroll_position: scroll_position,
fps: opts[:scroll_fps] || @default_fps,
acceleration: Acceleration.init(opts[:scroll_acceleration]),
hotkeys: Hotkeys.init(opts[:scroll_hotkeys]),
scroll_direction: nil,
scroll_bars_state: %{
vertical: %{
scrolling: :idle,
wheel_state: nil,
scroll_buttons: %{
scroll_button_1: :released,
scroll_button_2: :released
},
pid: nil
},
horizontal: %{
scrolling: :idle,
wheel_state: nil,
scroll_buttons: %{
scroll_button_1: :released,
scroll_button_2: :released
},
pid: nil
}
},
scroll_bars: scroll_bars
)
|> init_position_caps
end
@impl true
def mounted(scene) do
scene =
FloUI.Scrollable.ScrollableContainerController.render_content(scene)
Scenic.Scene.push_graph(scene, scene.assigns.graph)
end
@impl true
def bounds(%{frame: {width, height}}, _opts) do
{0.0, 0.0, width, height}
end
@impl true
def process_update(data, _opts, scene) do
scene =
assign(scene,
data: data,
scroll_position: PositionCap.cap(scene.assigns.position_caps, Vector2.invert(data.scroll_position))
)
{:noreply, scene}
end
@impl true
def process_event(
{:register_scroll_bar, direction, scroll_bar_state},
_pid,
%{assigns: %{scroll_bars_state: scroll_bars_state}} = scene
) do
scene =
scene
|> assign(
scroll_bars_state:
Map.update!(scroll_bars_state, direction, fn _ ->
scroll_bar_state
end)
)
{:noreply, scene}
end
def process_event({:scroll_bar_state_changed, direction, scroll_bar_state}, _, scene) do
scene =
scene
|> assign(
scroll_bars_state: Map.update!(
scene.assigns.scroll_bars_state,
direction, fn _ ->
scroll_bar_state
end)
)
|> update
{:noreply, scene}
end
def process_event({:update_scroll_position, :vertical, {_, y}}, _, scene) do
{x, _} = scene.assigns.scroll_position
{:noreply, assign(scene, scroll_position: PositionCap.cap(scene.assigns.position_caps, {x, y}))}
end
def process_event({:update_scroll_position, :horizontal, {_, x}}, _, scene) do
{_, y} = scene.assigns.scroll_position
{:noreply, assign(scene, scroll_position: PositionCap.cap(scene.assigns.position_caps, {x, y}))}
end
def process_event({:update_scroll_position, pos}, _, scene) do
{:noreply, assign(scene, scroll_position: PositionCap.cap(scene.assigns.position_caps, pos))}
end
def process_event(event, _, scene) do
{:cont, event, scene}
end
@impl true
def process_input(
{:cursor_scroll, scroll_pos},
:input_capture,
%{assigns: %{scroll_bars_state: scroll_bars_state}} = scene
) do
if not is_nil(scroll_bars_state.vertical.pid) do
GenServer.cast(scroll_bars_state.vertical.pid, {:update_cursor_scroll, scroll_pos})
end
if not is_nil(scroll_bars_state.horizontal.pid) do
GenServer.cast(scroll_bars_state.horizontal.pid, {:update_cursor_scroll, scroll_pos})
end
{:noreply, scene}
end
@impl true
def process_info(:tick, scene) do
{:noreply, assign(scene, animating: false) |> update}
end
@spec init_position_caps(Scenic.Scene.t) :: Scenic.Scene.t
defp init_position_caps(
%{
assigns: %{
frame: %{width: frame_width, height: frame_height},
content: %{x: x, y: y, width: content_width, height: content_height}
}
} = scene
) do
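# The scroll position is a translation of the content: {0, 0} means the content
# origin sits at the frame origin, and increasingly negative values scroll the
# content up/left. `min` is the most negative translation that still keeps the
# content covering the frame; `max` is the origin itself.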
min = {x + frame_width - content_width, y + frame_height - content_height}
max = {x, y}
position_cap = PositionCap.init(%{min: min, max: max})
assign(scene,
position_caps: position_cap,
scroll_position: PositionCap.cap(position_cap, Vector2.invert(scene.assigns.scroll_position))
)
end
@spec update(Scenic.Scene.t) :: Scenic.Scene.t
defp update(scene) do
scene
|> apply_force
|> verify_cooling_down
|> tick
end
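# Scroll state machine: when both bars report :idle while the content is still
# moving, mark them :cooling_down so ticks keep firing until the acceleration
# decays to stationary, then flip both back to :idle.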
@spec verify_cooling_down(Scenic.Scene.t) :: Scenic.Scene.t
defp verify_cooling_down(%{assigns: %{scroll_bars_state: %{vertical: vertical, horizontal: horizontal}}} = scene) do
if vertical.scrolling == :idle and
horizontal.scrolling == :idle and not
Acceleration.is_stationary?(scene.assigns.acceleration)
do
assign(scene,
scroll_bars_state: %{
vertical: %{vertical | scrolling: :cooling_down},
horizontal: %{horizontal | scrolling: :cooling_down}
}
)
else
if vertical.scrolling == :cooling_down and horizontal.scrolling == :cooling_down and Acceleration.is_stationary?(scene.assigns.acceleration) do
assign(scene,
scroll_bars_state: %{
vertical: %{vertical | scrolling: :idle},
horizontal: %{horizontal | scrolling: :idle}
}
)
else
scene
end
end
end
@spec apply_force(Scenic.Scene.t) :: Scenic.Scene.t
defp apply_force(
%{
assigns: %{
scroll_bars_state: %{
vertical: %{
scrolling: :idle
},
horizontal: %{
scrolling: :idle
}
}
}
} = scene
), do: scene
defp apply_force(
%{
assigns: %{
scroll_bars_state: %{
vertical: %{
scrolling: vert_scroll,
wheel_state: %{offset: {_, offset_y}}
},
horizontal: %{
scrolling: horiz_scroll,
wheel_state: %{offset: {_, offset_x}}
}
}
}
} = scene
) when vert_scroll == :wheel or horiz_scroll == :wheel do
{x, y} = scene.assigns.scroll_position
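# Scale the raw wheel offset by 5 so a single wheel tick moves the content a
# visible amount; the result is capped to the allowed range below.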
scroll_position = {x + offset_x * 5, y + offset_y * 5}
assign(scene, scroll_position: PositionCap.cap(scene.assigns.position_caps, scroll_position))
end
defp apply_force(scene) do
scroll_direction = get_scroll_direction(scene)
force =
Hotkeys.direction(scene.assigns.hotkeys)
|> Vector2.add(scroll_direction)
Acceleration.apply_force(scene.assigns.acceleration, force)
|> Acceleration.apply_counter_pressure()
|> (&assign(scene, acceleration: &1)).()
|> (fn scene ->
scroll_pos =
Acceleration.translate(scene.assigns.acceleration, scene.assigns.scroll_position)
assign(scene, scroll_position: PositionCap.cap(scene.assigns.position_caps, scroll_pos))
end).()
end
@spec get_scroll_direction(Scenic.Scene.t) :: Scenic.Math.Vector2.t
defp get_scroll_direction(%{assigns: %{scroll_bars_state: scroll_bars_state}}) do
case scroll_bars_state do
%{vertical: %{scroll_buttons: %{scroll_button_1: :pressed, scroll_button_2: :released}}} ->
Direction.return(1, :vertical)
|> Direction.to_vector_2()
%{vertical: %{scroll_buttons: %{scroll_button_1: :released, scroll_button_2: :pressed}}} ->
Direction.return(-1, :vertical)
|> Direction.to_vector_2()
%{horizontal: %{scroll_buttons: %{scroll_button_1: :pressed, scroll_button_2: :released}}} ->
Direction.return(1, :horizontal)
|> Direction.to_vector_2()
%{horizontal: %{scroll_buttons: %{scroll_button_1: :released, scroll_button_2: :pressed}}} ->
Direction.return(-1, :horizontal)
|> Direction.to_vector_2()
_ ->
{0, 0}
end
end
@spec tick(Scenic.Scene.t) :: Scenic.Scene.t
defp tick(%{assigns: %{scroll_bars_state: %{vertical: %{scrolling: :idle}, horizontal: %{scrolling: :idle}}}} = scene), do: assign(scene, animating: false)
defp tick(%{assigns: %{scroll_bars_state: %{vertical: %{scrolling: :dragging}}}} = scene) do
assign(scene, animating: false)
end
defp tick(%{assigns: %{scroll_bars_state: %{horizontal: %{scrolling: :dragging}}}} = scene) do
assign(scene, animating: false)
end
defp tick(%{assigns: %{scroll_bars_state: %{vertical: %{scrolling: vert_scrolling}, horizontal: %{scrolling: horiz_scrolling}}}} = scene)
when vert_scrolling == :wheel or horiz_scrolling == :wheel
do
assign(scene, animating: false)
end
defp tick(%{assigns: %{animating: true}} = scene), do: scene
defp tick(scene) do
Process.send_after(self(), :tick, tick_time(scene))
assign(scene, animating: true)
end
@spec tick_time(Scenic.Scene.t) :: integer
defp tick_time(%{assigns: %{fps: fps}}) do
trunc(1000 / fps)
end
end
|
lib/scrollable/container.ex
| 0.835282
| 0.731011
|
container.ex
|
starcoder
|
defmodule CommonGraphqlClient.StaticValidator do
@moduledoc """
Validates a query against a static graphql schema
"""
@doc """
This method validates a query against a graphql schema and returns `:ok` if
the query is valid or returns `{:error, reason}`.
This method takes:
* a `query_string`: A graphql query string
* Options: Options for validation include:
- `schema_string`: Contents of the graphql schema to validate the query against
- `schema_path`: Path to the file containing the graphql schema
- `validation_strategy`: Strategy used to validate the query. It can be done in
multiple ways:
* `:npm_graphql` (needs `npm` cli) Uses npm calls to validate query
* `:native` (todo: parse schema in elixir) Will validate in pure elixir
## Examples:
# When a schema file is given and the query is valid
iex> schema_path = "./test/support/example_schema.json"
iex> query_string = "{ __schema { types { name } } }"
iex> validation_strategy = :npm_graphql
iex> CommonGraphqlClient.StaticValidator.validate(
...> query_string,
...> %{ validation_strategy: validation_strategy,
...> schema_path: schema_path }
...> )
:ok
# When a schema string is given and the query is valid
iex> schema_path = "./test/support/example_schema.json"
iex> schema_string = File.read!(schema_path)
iex> query_string = "{ __schema { types { name } } }"
iex> validation_strategy = :npm_graphql
iex> CommonGraphqlClient.StaticValidator.validate(
...> query_string,
...> %{ validation_strategy: validation_strategy,
...> schema_string: schema_string }
...> )
:ok
# When query is invalid
iex> schema_path = "./test/support/example_schema.json"
iex> query_string = "bad query string"
iex> validation_strategy = :npm_graphql
iex> {:error, error} = CommonGraphqlClient.StaticValidator.validate(
...> query_string,
...> %{ validation_strategy: validation_strategy,
...> schema_path: schema_path }
...> )
iex> Regex.match?(~r/Unexpected Name \\"bad\\"/, error)
true
# When schema is invalid
iex> schema_string = "bad schema"
iex> query_string = "{ __schema { types { name } } }"
iex> validation_strategy = :npm_graphql
iex> {:error, error} = CommonGraphqlClient.StaticValidator.validate(
...> query_string,
...> %{ validation_strategy: validation_strategy,
...> schema_string: schema_string }
...> )
iex> Regex.match?(~r/SyntaxError/, error)
true
# When validation_strategy is native
iex> schema_string = "someschema"
iex> query_string = "somequery"
iex> validation_strategy = :native
iex> CommonGraphqlClient.StaticValidator.validate(
...> query_string,
...> %{ validation_strategy: validation_strategy,
...> schema_string: schema_string }
...> )
** (RuntimeError) Not Implemented
"""
@spec validate(String.t(), map()) :: :ok | {:error, term()}
def validate(query_string, opts \\ %{})
def validate(query_string, opts) do
case Map.get(opts, :validation_strategy, :npm_graphql) do
:npm_graphql ->
__MODULE__.NpmGraphql.validate(query_string, opts)
_ ->
raise "Not Implemented"
end
end
end
|
lib/common_graphql_client/static_validator.ex
| 0.918713
| 0.577049
|
static_validator.ex
|
starcoder
|
defmodule Yaq do
@moduledoc """
Yet Another Queue module for Elixir
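Internally this is the classic two-list ("banker's") queue: `l_data` holds the
front of the queue and `r_data` holds the rear in reverse order, together with
cached sizes. When one side runs empty, `rebalance/1` moves roughly half of the
other side over, keeping `dequeue/2` and `dequeue_r/2` amortized O(1).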
"""
@type value :: term()
@opaque t :: %__MODULE__{
l_data: list(),
r_data: list(),
l_size: non_neg_integer(),
r_size: non_neg_integer()
}
@opaque t(value) :: %__MODULE__{
l_data: list(value),
r_data: list(value),
l_size: non_neg_integer(),
r_size: non_neg_integer()
}
defstruct l_data: [], r_data: [], l_size: 0, r_size: 0
@doc """
Concatenate an enumerable to the rear of the queue.
## Parameters
- `q`: Current queue
- `enum`: Items to add
## Examples
iex> q = Yaq.new(1..10) |> Yaq.concat(11..20)
#Yaq<length: 20>
iex> Yaq.to_list(q)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
"""
@spec concat(t(), Enumerable.t()) :: t()
def concat(%__MODULE__{} = q, enum),
do: Enum.reduce(enum, q, fn x, acc -> Yaq.enqueue(acc, x) end)
@doc """
Concatenate an enumerable to the front of the queue.
## Parameters
- `q`: Current queue
- `enum`: Items to add
## Examples
iex> q = Yaq.new(1..10) |> Yaq.concat_r(11..20)
#Yaq<length: 20>
iex> Yaq.to_list(q)
[11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
"""
@spec concat_r(t(), Enumerable.t()) :: t()
def concat_r(%__MODULE__{} = q, enum),
do: Enum.reverse(enum) |> Enum.reduce(q, fn x, acc -> Yaq.enqueue_r(acc, x) end)
@doc """
Push a new term onto the rear of the queue.
## Parameters
- `q`: Current queue
- `term`: Elixir term to enqueue
## Examples
iex> q = Yaq.new()
#Yaq<length: 0>
iex> q = Yaq.enqueue(q, 1)
#Yaq<length: 1>
iex> q = Yaq.enqueue(q, 2)
#Yaq<length: 2>
iex> Yaq.to_list(q)
[1, 2]
"""
@spec enqueue(t(), term()) :: t()
def enqueue(%__MODULE__{l_data: [], r_data: []}, term),
do: %__MODULE__{l_data: [term], l_size: 1}
def enqueue(%__MODULE__{} = q, term), do: %{q | r_data: [term | q.r_data], r_size: q.r_size + 1}
@doc """
Push a new term onto the front of the queue.
## Examples
iex> q = Yaq.new()
#Yaq<length: 0>
iex> q = Yaq.enqueue_r(q, 1)
#Yaq<length: 1>
iex> q = Yaq.enqueue_r(q, 2)
#Yaq<length: 2>
iex> Yaq.to_list(q)
[2, 1]
"""
@spec enqueue_r(t(), term()) :: t()
def enqueue_r(%__MODULE__{l_data: [], r_data: []}, term), do: enqueue(%__MODULE__{}, term)
def enqueue_r(%__MODULE__{} = q, term),
do: %{q | l_data: [term | q.l_data], l_size: q.l_size + 1}
@doc """
Create a new queue.
## Parameters
* `enum` (optional): initial queue data
## Examples
iex> Yaq.new()
#Yaq<length: 0>
iex> Yaq.new(1..10)
#Yaq<length: 10>
"""
@spec new() :: t()
@spec new(Enumerable.t()) :: t()
def new(enum \\ []) do
data = Enum.to_list(enum)
size = length(data)
%__MODULE__{l_data: data, l_size: size}
end
@doc """
Remove an item from the front of the queue.
## Parameters
- `q`: The current queue
- `default` (optional): Return value if the queue is empty. Defaults to `nil`
## Examples
iex> {term, q} = Yaq.new(1..3) |> Yaq.dequeue()
iex> term
1
iex> q
#Yaq<length: 2>
iex> {term, q} = Yaq.new() |> Yaq.dequeue()
iex> term
nil
iex> q
#Yaq<length: 0>
iex> {term, q} = Yaq.new() |> Yaq.dequeue(:user_defined)
iex> term
:user_defined
iex> q
#Yaq<length: 0>
"""
@spec dequeue(t(), term()) :: {value(), t()}
def dequeue(%__MODULE__{} = q, default \\ nil) do
q = rebalance(q)
case q.l_data do
[] ->
{default, q}
[term] ->
{term, %{q | l_data: [], l_size: 0}}
[term | l_data] ->
{term, %{q | l_data: l_data, l_size: q.l_size - 1}}
end
end
@doc """
Remove an item from the rear of the queue.
## Parameters
- `q`: The current queue
- `default` (optional): Return value if the queue is empty. Defaults to `nil`
## Examples
iex> {term, q} = Yaq.new(1..3) |> Yaq.dequeue_r()
iex> term
3
iex> q
#Yaq<length: 2>
iex> {term, q} = Yaq.new() |> Yaq.dequeue_r()
iex> term
nil
iex> q
#Yaq<length: 0>
iex> {term, q} = Yaq.new() |> Yaq.dequeue_r(:user_defined)
iex> term
:user_defined
iex> q
#Yaq<length: 0>
"""
@spec dequeue_r(t(), term()) :: {value(), t()}
def dequeue_r(%__MODULE__{} = q, default \\ nil) do
q = rebalance(q)
case q.r_data do
[] ->
dequeue(q, default)
[term] ->
{term, %{q | r_data: [], r_size: 0}}
[term | r_data] ->
{term, %{q | r_data: r_data, r_size: q.r_size - 1}}
end
end
@doc """
Return the front element of the queue. Returns `nil` if empty
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.peek()
nil
iex> Yaq.new(1..3) |> Yaq.peek()
1
"""
@spec peek(t()) :: value()
def peek(%__MODULE__{l_data: [], r_data: []}), do: nil
def peek(%__MODULE__{} = q) do
{value, _queue} = __MODULE__.dequeue(q)
value
end
@doc """
Return the rear element of the queue. Returns `nil` if empty
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.peek_r()
nil
iex> Yaq.new(1..3) |> Yaq.peek_r()
3
"""
@spec peek_r(t()) :: value()
def peek_r(%__MODULE__{l_data: [], r_data: []}), do: nil
def peek_r(q) do
{value, _queue} = __MODULE__.dequeue_r(q)
value
end
@doc """
Fetches the front value from the queue.
If the queue is empty, returns `:error`. Otherwise, returns the tuple `{value, updated_q}`.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.fetch()
:error
iex> {response, queue} = Yaq.new([1, 2, 3]) |> Yaq.fetch()
iex> response
1
iex> queue
#Yaq<length: 2>
"""
@spec fetch(t()) :: {value(), t()} | :error
def fetch(%__MODULE__{l_size: 0, r_size: 0}), do: :error
def fetch(%__MODULE__{} = q), do: dequeue(q)
@doc """
Fetches the front value from the queue.
If the queue is empty, raises `Yaq.EmptyQueueError`. Otherwise, returns the tuple `{value, q}`.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.fetch!()
** (Yaq.EmptyQueueError) empty queue
iex> {response, queue} = Yaq.new([1, 2, 3]) |> Yaq.fetch!()
iex> response
1
iex> queue
#Yaq<length: 2>
"""
@spec fetch!(t()) :: {value(), t()}
def fetch!(%__MODULE__{l_size: 0, r_size: 0}), do: raise(Yaq.EmptyQueueError)
def fetch!(%__MODULE__{} = q), do: dequeue(q)
@doc """
Fetches the rear value from the queue.
If the queue is empty, returns `:error`. Otherwise, returns the tuple `{value, q}`.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.fetch_r()
:error
iex> {response, queue} = Yaq.new([1, 2, 3]) |> Yaq.fetch_r()
iex> response
3
iex> queue
#Yaq<length: 2>
"""
@spec fetch_r(t()) :: {value(), t()} | :error
def fetch_r(%__MODULE__{l_size: 0, r_size: 0}), do: :error
def fetch_r(%__MODULE__{} = q), do: dequeue_r(q)
@doc """
Fetches the rear value from the queue.
If the queue is empty, raises `Yaq.EmptyQueueError`. Otherwise, returns the tuple `{value, q}`.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.fetch_r!()
** (Yaq.EmptyQueueError) empty queue
iex> {response, queue} = Yaq.new([1, 2, 3]) |> Yaq.fetch_r!()
iex> response
3
iex> queue
#Yaq<length: 2>
"""
@spec fetch_r!(t()) :: {value(), t()}
def fetch_r!(%__MODULE__{l_size: 0, r_size: 0}), do: raise(Yaq.EmptyQueueError)
def fetch_r!(%__MODULE__{} = q), do: dequeue_r(q)
@doc """
Reverse the queue.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new([1, 2, 3]) |> Yaq.reverse() |> Yaq.to_list()
[3, 2, 1]
"""
def reverse(%__MODULE__{} = q) do
%{q | l_data: q.r_data, l_size: q.r_size, r_data: q.l_data, r_size: q.l_size}
end
@doc """
Splits the queue in two. Takes up to the first `n` elements from the front of
the queue as the first queue and returns the remainder as the second.
If `n` is larger than the size of `q`, then the first queue will be `q` and the
second will be empty.
## Parameters
- `q`: Current queue
- `n`: Max size of the first queue
## Examples
iex> {left, right} = Yaq.new(1..10) |> Yaq.split(0)
iex> Yaq.to_list(left)
[]
iex> Yaq.to_list(right)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
iex> {left, right} = Yaq.new(1..10) |> Yaq.split(10)
iex> Yaq.to_list(left)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
iex> Yaq.to_list(right)
[]
iex> {left, right} = Yaq.new(1..10) |> Yaq.split(20)
iex> Yaq.to_list(left)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
iex> Yaq.to_list(right)
[]
iex> {left, right} = Yaq.new(1..10) |> Yaq.split(3)
iex> Yaq.to_list(left)
[1, 2, 3]
iex> Yaq.to_list(right)
[4, 5, 6, 7, 8, 9, 10]
iex> {left, right} = Yaq.new(1..10) |> Yaq.split(8)
iex> Yaq.to_list(left)
[1, 2, 3, 4, 5, 6, 7, 8]
iex> Yaq.to_list(right)
[9, 10]
"""
@spec split(t(), non_neg_integer()) :: {t(), t()}
def split(%__MODULE__{} = q, 0), do: {Yaq.new(), q}
def split(%__MODULE__{l_size: l, r_size: r} = q, n) when l + r <= n, do: {q, Yaq.new()}
def split(%__MODULE__{l_size: l} = q, n) when l >= n do
{left, remainder} = Enum.split(q.l_data, n)
front = %__MODULE__{l_data: left, l_size: n, r_data: [], r_size: 0}
rear = %__MODULE__{
l_data: remainder,
l_size: q.l_size - n,
r_data: q.r_data,
r_size: q.r_size
}
{front, rear}
end
def split(%__MODULE__{} = q, n) do
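# This clause is reached when l_size < n < size(q): the front queue keeps all
# of l_data plus the oldest (n - l_size) elements of the rear list.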
# Bound the size
n = min(n, size(q))
{right, remainder} = Enum.split(q.r_data, size(q) - n)
front = %__MODULE__{
l_data: q.l_data,
l_size: q.l_size,
r_data: remainder,
r_size: n - q.l_size
}
rear = %__MODULE__{l_data: [], l_size: 0, r_data: right, r_size: size(q) - n}
{front, rear}
end
@doc """
Return the number of elements in the queue.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new() |> Yaq.size()
0
iex> Yaq.new(1..3) |> Yaq.size()
3
"""
@spec size(t()) :: non_neg_integer()
def size(%__MODULE__{} = q), do: q.l_size + q.r_size
@doc """
Return the elements of the queue as a list.
## Parameters
- `q`: Current queue
## Examples
iex> Yaq.new([1, 2, 3]) |> Yaq.to_list()
[1, 2, 3]
"""
@spec to_list(t()) :: list()
def to_list(%__MODULE__{l_data: [], r_data: []}), do: []
def to_list(%__MODULE__{} = q), do: q.l_data ++ Enum.reverse(q.r_data)
# Rebalance the queue internal data if necessary
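# When one side is empty, roughly half of the other side is reversed onto it,
# so repeated removals from either end stay amortized O(1).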
@spec rebalance(t()) :: t()
defp rebalance(%__MODULE__{l_data: [], r_data: []} = q), do: q
defp rebalance(%__MODULE__{l_data: []} = q) do
l_size = round(q.r_size / 2)
r_size = q.r_size - l_size
{r_data, l_data} = Enum.split(q.r_data, r_size)
l_data = Enum.reverse(l_data)
%__MODULE__{l_data: l_data, r_data: r_data, l_size: l_size, r_size: r_size}
end
defp rebalance(%__MODULE__{r_data: []} = q) do
r_size = floor(q.l_size / 2)
l_size = q.l_size - r_size
{l_data, r_data} = Enum.split(q.l_data, l_size)
r_data = Enum.reverse(r_data)
%__MODULE__{l_data: l_data, r_data: r_data, l_size: l_size, r_size: r_size}
end
defp rebalance(q), do: q
end
defmodule Yaq.EmptyQueueError do
defexception message: "empty queue"
end
|
lib/yaq.ex
| 0.908312
| 0.596962
|
yaq.ex
|
starcoder
|
defmodule KantanCluster do
@moduledoc """
Form a simple Erlang cluster easily in Elixir.
"""
require Logger
@typedoc """
A node type. See `Node.start/3`.
"""
@type node_type :: :longnames | :shortnames
@typedoc """
Options for a cluster.
* `:node`
- the name of a node that you want to start
- default: `{:longnames, :"<EMAIL>"}` where `xxxx` is a random string, `yyyy` is the hostname of a machine
- examples:
- `"node1"`
- `{:longnames, :"node<EMAIL>"}`
- `{:shortnames, :"node1@nerves-mn00"}`
* `:cookie`
- [Erlang magic cookie] to form a cluster
- default: random cookie
* `:connect_to`
- a list of nodes we want our node to be connected with
- default: `[]`
[Erlang magic cookie]: https://erlang.org/doc/reference_manual/distributed.html#security
"""
@type option() ::
{:node, binary | {node_type, node}}
| {:cookie, atom}
| {:connect_to, node | [node]}
@doc """
Starts a node and attempts to connect it to specified nodes. Configuration
options can be specified as an argument
KantanCluster.start(
node: "node1",
cookie: :hello,
connect_to: [:"<EMAIL>"]
)
or in your `config/config.exs`.
config :kantan_cluster,
node: "node1",
cookie: :hello,
connect_to: [:"<EMAIL>"]
"""
@spec start([option]) :: :ok
def start(opts \\ []) when is_list(opts) do
ensure_distribution!(opts)
validate_hostname_resolution!()
KantanCluster.Config.get_cookie_option(opts) |> Node.set_cookie()
KantanCluster.Config.get_connect_to_option(opts) |> connect()
:ok
end
@doc """
Stops a node and all the connections.
"""
@spec stop :: :ok | {:error, :not_allowed | :not_found}
def stop() do
KantanCluster.NodeConnectorSupervisor.terminate_children()
Node.stop()
end
@doc """
Connects current node to specified nodes.
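For example (the node name below is hypothetical):
KantanCluster.connect([:"node2@hostname"])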
"""
@spec connect(node | [node]) :: {:ok, [pid]}
def connect(connect_to) when is_atom(connect_to), do: connect([connect_to])
def connect(connect_to) when is_list(connect_to) do
pids =
Enum.map(connect_to, &KantanCluster.NodeConnectorSupervisor.find_or_start_child_process/1)
{:ok, pids}
end
@doc """
Disconnects current node from specified nodes.
"""
@spec disconnect(node | [node]) :: :ok
def disconnect(node_name) when is_atom(node_name), do: disconnect([node_name])
def disconnect(node_names) when is_list(node_names) do
:ok = Enum.each(node_names, &KantanCluster.NodeConnector.disconnect/1)
end
@doc """
Subscribes the caller to a given topic.
* topic - The topic to subscribe to, for example: "users:123"
"""
@spec subscribe(binary) :: :ok | {:error, any}
defdelegate subscribe(topic), to: KantanCluster.PubSub
@spec unsubscribe(binary) :: :ok
defdelegate unsubscribe(topic), to: KantanCluster.PubSub
@doc """
Broadcasts message on a given topic across the whole cluster.
* topic - The topic to broadcast to, ie: "users:123"
* message - The payload of the broadcast
"""
@spec broadcast(binary, any) :: :ok | {:error, any}
defdelegate broadcast(topic, message), to: KantanCluster.PubSub
@spec ensure_distribution!(keyword) :: :ok
defp ensure_distribution!(opts) do
if Node.alive?() do
Logger.info("distributed node already started: #{Node.self()}")
else
case System.cmd("epmd", ["-daemon"]) do
{_, 0} -> :ok
_ -> raise("could not start epmd (Erlang Port Mapper Driver).")
end
{type, name} = KantanCluster.Config.get_node_option(opts)
case Node.start(name, type) do
{:ok, _} -> :ok
{:error, reason} -> raise("could not start distributed node: #{inspect(reason)}")
end
end
end
import Record
defrecordp :hostent, Record.extract(:hostent, from_lib: "kernel/include/inet.hrl")
defp validate_hostname_resolution!() do
validate_hostname_resolution!(KantanCluster.Utils.shortnames_mode?())
end
defp validate_hostname_resolution!(true = _shortnames_mode) do
hostname = KantanCluster.Utils.node_hostname() |> to_charlist()
case :inet.gethostbyname(hostname) do
{:error, :nxdomain} ->
invalid_hostname!("your hostname \"#{hostname}\" does not resolve to an IP address")
{:ok, hostent(h_addrtype: :inet, h_addr_list: addresses)} ->
any_loopback? = Enum.any?(addresses, &match?({127, _, _, _}, &1))
unless any_loopback? do
invalid_hostname!(
"your hostname \"#{hostname}\" does not resolve to a loopback address (127.0.0.0/8)"
)
end
_ ->
:ok
end
end
defp validate_hostname_resolution!(false = _shortnames_mode), do: :ok
@spec invalid_hostname!(binary) :: no_return
defp invalid_hostname!(prelude) do
raise("""
#{prelude}, which indicates something wrong in your OS configuration.
Make sure your computer's name resolves locally or start KantanCluster using a long distribution name.
""")
end
end
|
lib/kantan_cluster.ex
| 0.879555
| 0.519948
|
kantan_cluster.ex
|
starcoder
|
defprotocol Collidable do
@moduledoc """
Protocol defining an interface for checking whether objects are colliding.
In the event of collision, it also defines functions for resolution.
"""
@doc """
Determine whether a pair of collidable entities are in collision.
## Examples
iex> Collidable.collision?(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {4, 4})
...> )
false
iex> Collidable.collision?(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {1, 1})
...> )
true
"""
def collision?(any, any)
@doc """
Determine how to resolve the collision.
Returns: {Vector, magnitude}
## Examples
iex> Collidable.resolution(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {4, 4})
...> )
nil
iex> Collidable.resolution(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {1, 1})
...> )
{%Collision.Vector.Vector2{x: 0.7071067811865475, y: 0.7071067811865475}, 1.414213562373095}
"""
def resolution(any, any)
@doc """
Resolve the collision.
Returns the first entity and the translation of the second entity.
Returns: {Collidable_entity, Collidable_entity}
## Examples
iex> Collidable.resolve_collision(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {4, 4})
...> )
{%Polygon{convex: true, edges: [
%Edge{length: 2.8284271247461903, next: %Vertex{x: 0.0, y: 2.0}, point: %Vertex{x: 2.0, y: 0.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: -2.0, y: 0.0}, point: %Vertex{x: 0.0, y: 2.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 0.0, y: -2.0}, point: %Vertex{x: -2.0, y: 0.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 2.0, y: 0.0}, point: %Vertex{x: 0.0, y: -2.0}}
], vertices: [
%Vertex{x: 2.0, y: 0.0}, %Vertex{x: 0.0, y: 2.0},
%Vertex{x: -2.0, y: 0.0}, %Vertex{x: 0.0, y: -2.0}
]}, %Polygon{convex: true, edges: [
%Edge{length: 2.8284271247461903, next: %Vertex{x: 4.0, y: 6.0}, point: %Vertex{x: 6.0, y: 4.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 2.0, y: 4.0}, point: %Vertex{x: 4.0, y: 6.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 4.0, y: 2.0}, point: %Vertex{x: 2.0, y: 4.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 6.0, y: 4.0}, point: %Vertex{x: 4.0, y: 2.0}}],
vertices: [%Vertex{x: 6.0, y: 4.0}, %Vertex{x: 4.0, y: 6.0},
%Vertex{x: 2.0, y: 4.0}, %Vertex{x: 4.0, y: 2.0}
]}}
iex> Collidable.resolve_collision(
...> Polygon.gen_regular_polygon(4, 2, 0, {0, 0}),
...> Polygon.gen_regular_polygon(4, 2, 0, {1, 1})
...> )
{%Polygon{edges: [
%Edge{length: 2.8284271247461903, next: %Vertex{x: 0.0, y: 2.0},
point: %Vertex{x: 2.0, y: 0.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: -2.0, y: 0.0},
point: %Vertex{x: 0.0, y: 2.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 0.0, y: -2.0},
point: %Vertex{x: -2.0, y: 0.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 2.0, y: 0.0},
point: %Vertex{x: 0.0, y: -2.0}}
], vertices: [
%Vertex{x: 2.0, y: 0.0}, %Vertex{x: 0.0, y: 2.0},
%Vertex{x: -2.0, y: 0.0}, %Vertex{x: 0.0, y: -2.0}]
}, %Polygon{edges: [
%Edge{length: 2.8284271247461903, next: %Vertex{x: 2.0, y: 4.0},
point: %Vertex{x: 4.0, y: 2.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 0.0, y: 2.0},
point: %Vertex{x: 2.0, y: 4.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 2.0, y: 0.0},
point: %Vertex{x: 0.0, y: 2.0}},
%Edge{length: 2.8284271247461903, next: %Vertex{x: 4.0, y: 2.0},
point: %Vertex{x: 2.0, y: -0.0}}
], vertices: [
%Vertex{x: 4.0, y: 2.0}, %Vertex{x: 2.0, y: 4.0},
%Vertex{x: 0.0, y: 2.0}, %Vertex{x: 2.0, y: 0.0}]}}
"""
def resolve_collision(any, any)
end
|
lib/collidable.ex
| 0.897313
| 0.710377
|
collidable.ex
|
starcoder
|
defmodule Ecbolic.Pretty do
@moduledoc """
This module provides small formatting helpers for when you want
to display your docs.
All functions intended for formatting can be supplied
with a format string, which allows you to use function names,
help and such. The default format is `":f - :h"`
- :f
Inserts the name of the function
- :h
Inserts the documentation of the function
- :u
Inserts the usage of a function
- :a
Aligns all documentations up to this point. This only
works with functions that format multiple functions.
## Example output
```elixir
Ecbolic.Pretty.format([:hello, :long_function_name], ":f:a - :h")
#=>
"hello - returns `world`",
"long_function_name - Long description"
```
"""
alias Ecbolic.{Store, Help}
@doc """
Formats all functions according to the given format
"""
@spec format_all(String.t()) :: String.t()
def format_all(format \\ ":f - :h") do
Store.all()
|> format_entries(format)
end
@doc """
Formats all functions according to the given format,
but clustered by their group.
"""
@spec format_all_groups(String.t()) :: %{Ecbolic.atom_or_string() => String.t()}
def format_all_groups(format \\ ":f - :h") do
{:ok, groups} = Store.all_grouped()
for {k, v} <- groups, into: %{}, do: {k, format_entries(v, format)}
end
@doc """
Formats all functions that belong to the given group
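For example (assuming a `:general` group has been registered):
Ecbolic.Pretty.format_group(:general)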
"""
@spec format_group(Ecbolic.atom_or_string(), String.t()) :: String.t()
def format_group(group, format \\ ":f - :h") do
with {:ok, entries} <- Store.group(group) do
format_entries(entries, format)
end
end
@doc """
Formats a single or multiple given functions
"""
def format(list, format \\ ":f - :h")
def format([], _), do: []
def format(nil, _), do: []
@spec format([Ecbolic.atom_or_string()], String.t()) :: String.t()
def format(names, format) when is_list(names) do
with {:ok, entries} <- Store.lookup(names) do
format_entries(entries, format)
end
end
@spec format(Ecbolic.atom_or_string(), String.t()) :: String.t()
def format(name, format) do
with {:ok, help} <- Store.lookup(name) do
apply_tokens(help, format)
|> String.replace(":a", "")
end
end
@doc """
Aligns multiple strings using tokens
This function adds spaces where the tokens are places,
so that the Strings before the token are aligned
## Examples
iex>Ecbolic.Pretty.align [
...> "aaaaaaaaaa :a - 100.00:a%",
...> "aaaaaaaaa :a - 20.50:a%",
...> "aaaaaaaa :a - 19.53:a%",
...> "aaaaaaa :a - 17.42:a%",
...> "aaaaaa :a - 17.29:a%",
...> "aaaaa :a - 16.48:a%",
...> "aaaa :a - 15.75:a%",
...> "aaa :a - 15.10:a%"
...>]
[
"aaaaaaaaaa - 100.00%",
"aaaaaaaaa - 20.50 %",
"aaaaaaaa - 19.53 %",
"aaaaaaa - 17.42 %",
"aaaaaa - 17.29 %",
"aaaaa - 16.48 %",
"aaaa - 15.75 %",
"aaa - 15.10 %"
]
"""
@spec align([String.t()]) :: [String.t()]
def align([]), do: []
def align(entries) do
{to_split, rejected} =
entries
|> Enum.split_with(&String.contains?(&1, ":a"))
split =
to_split
|> Enum.map(&String.split(&1, ":a", parts: 2))
if Enum.empty?(split) do
entries
else
# Transposing
[to_align, rest] = transpose(split)
max_length =
to_align
|> Enum.map(&String.length/1)
|> Enum.max()
aligned =
to_align
|> Enum.map(&(&1 <> String.duplicate(" ", max_length - String.length(&1))))
new_entries =
[aligned, rest]
|> transpose()
|> Enum.map(&Enum.join/1)
rejected ++ align(new_entries)
end
end
defp format_entries(entries, format) do
entries
|> Enum.map(&apply_tokens(&1, format))
|> align()
|> Enum.sort(&lexographical_compare/2)
|> Enum.join("\n")
end
@doc false
def lexographical_compare("", _), do: false
def lexographical_compare(_, ""), do: true
def lexographical_compare(str1, str2) do
{f1, r1} = String.split_at(str1, 1)
{f2, r2} = String.split_at(str2, 1)
cond do
f1 == f2 -> lexographical_compare(r1, r2)
String.downcase(f1) == String.downcase(f2) and f1 > f2 -> true
String.downcase(f1) == String.downcase(f2) -> false
String.downcase(f1) > String.downcase(f2) -> false
true -> true
end
end
defp apply_tokens(help, format) do
format
|> String.replace(":f", to_string(help.name))
|> String.replace(":g", to_string(help.group))
|> String.replace(":u", help.usage || "")
|> String.replace(":h", help.description || "")
end
defp transpose(list) do
list
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
end
end
|
lib/ecbolic/pretty.ex
| 0.848628
| 0.854642
|
pretty.ex
|
starcoder
|
defmodule Dayron.Request do
@moduledoc """
Defines a struct to store data necessary to send a request using an adapter.
Also provides helper functions to map request data to adapter method calls.
"""
defstruct method: :get, url: "", body: %{}, headers: [], options: []
@type t :: %__MODULE__{method: atom, url: binary, body: map, headers: list,
options: list}
@doc """
Given a request struct and an adapter, calls the correct adapter function
passing the correct parameters. Returns a tuple of `{request, response}`.
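For example, with a hypothetical adapter module `MyAdapter` implementing
get/3, post/4, patch/4 and delete/3:
request = %Dayron.Request{method: :get, url: "http://api.example.com/users"}
{request, response} = Dayron.Request.send(request, MyAdapter)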
"""
def send(request, adapter) do
start = current_time()
opts = request.options
adapter_response = case request.method do
:get -> adapter.get(request.url, request.headers, opts)
:post -> adapter.post(request.url, request.body, request.headers, opts)
:patch -> adapter.patch(request.url, request.body, request.headers, opts)
:delete -> adapter.delete(request.url, request.headers, opts)
end
{request, response_with_time(adapter_response, start)}
end
defp response_with_time({:ok, response}, start) do
%{response | elapsed_time: time_diff(start, current_time())}
end
defp response_with_time({_, response}, _), do: response
defp current_time, do: :os.timestamp()
defp time_diff(start, stop), do: :timer.now_diff(stop, start)
end
defimpl Inspect, for: Dayron.Request do
@moduledoc """
Implementing Inspect protocol for Dayron.Request
It changes the output for pretty:true option
## Example:
> inspect request, pretty: true
GET http://api.example.com
Options:
Params: q="qu", page=2
Timeout: 8000
Body:
Name: "Dayse"
Headers:
access_token: "token"
"""
import Inspect.Algebra
@tab_width 5
def inspect(request, %Inspect.Opts{pretty: true}) do
concat([
title(request),
"\n",
nest(
glue(
"Options:",
list_to_doc(request.options, 2)
), @tab_width
),
"\n",
nest(
glue(
"Body:",
list_to_doc(request.body, 2)
), @tab_width
),
"\n",
nest(
glue(
"Headers:",
list_to_doc(request.headers, 2)
), @tab_width
)
])
end
def inspect(request, opts), do: Inspect.Any.inspect(request, opts)
defp title(request) do
glue method_to_string(request.method), request.url
end
defp method_to_string(nil), do: "NO_METHOD"
defp method_to_string(method) when is_atom(method) do
method |> Atom.to_string |> String.upcase
end
defp list_to_doc(nil, _level), do: "-"
defp list_to_doc([], _level), do: "-"
defp list_to_doc(value, _level) when is_binary(value), do: inspect(value)
defp list_to_doc(map = %{}, level), do: list_to_doc(Map.to_list(map), level)
defp list_to_doc(list, level) do
list
|> Enum.map(fn
{key, value} when is_list(value) ->
nest(
glue(
concat(key |> Atom.to_string |> String.capitalize, ":"),
list_to_doc(value, level + 1)
), @tab_width * level
)
{key, value} ->
concat(Atom.to_string(key) <> "=", inspect(value))
value -> inspect(value)
end)
|> fold_doc(&glue(&1,&2))
end
end
|
lib/dayron/request.ex
| 0.830457
| 0.514644
|
request.ex
|
starcoder
|
defmodule Day04.Board do
use Bitwise
defstruct state: 0, positions: %{}
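# Each 5x5 board is tracked as a 25-bit bitmap: `positions` maps a drawn number
# (as a string) to its single-bit mask, and `state` ORs in the masks of called
# numbers. The row/column masks below detect a completed line.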
@row1 String.to_integer("0000000000000000000011111", 2)
@row2 String.to_integer("0000000000000001111100000", 2)
@row3 String.to_integer("0000000000111110000000000", 2)
@row4 String.to_integer("0000011111000000000000000", 2)
@row5 String.to_integer("1111100000000000000000000", 2)
@col1 String.to_integer("1000010000100001000010000", 2)
@col2 String.to_integer("0100001000010000100001000", 2)
@col3 String.to_integer("0010000100001000010000100", 2)
@col4 String.to_integer("0001000010000100001000010", 2)
@col5 String.to_integer("0000100001000010000100001", 2)
def parse(lines) do
%__MODULE__{
state: 0,
positions: lines |> String.split() |> parse_positions(%{}, 1)
}
end
def call(board, number) do
mask = Map.get(board.positions, number, 0)
%__MODULE__{positions: Map.delete(board.positions, number), state: board.state ||| mask}
end
def call_all(board, numbers) do
numbers
|> Enum.reduce_while({board, 1}, fn n, {b, i} ->
new_board = call(b, n)
if winning?(new_board) do
{:halt, {i, String.to_integer(n) * unmarked_sum(new_board)}}
else
{:cont, {new_board, i + 1}}
end
end)
end
def winning?(%__MODULE__{state: s}) when (s &&& @row1) == @row1, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @row2) == @row2, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @row3) == @row3, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @row4) == @row4, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @row5) == @row5, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @col1) == @col1, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @col2) == @col2, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @col3) == @col3, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @col4) == @col4, do: true
def winning?(%__MODULE__{state: s}) when (s &&& @col5) == @col5, do: true
def winning?(_), do: false
def unmarked_sum(%__MODULE__{positions: positions}) do
positions
|> Map.keys()
|> Enum.map(&String.to_integer/1)
|> Enum.reduce(&Kernel.+/2)
end
defp parse_positions([], positions, _mask), do: positions
defp parse_positions([n | rest], positions, mask) do
parse_positions(rest, Map.put(positions, n, mask), mask <<< 1)
end
end
|
year_2021/lib/day_04/board.ex
| 0.692746
| 0.565209
|
board.ex
|
starcoder
|
defmodule Protobuf.Protoc.Generator.Util do
@moduledoc false
alias Protobuf.Protoc.Context
@locals_without_parens [field: 2, field: 3, oneof: 2, rpc: 3, extend: 4, extensions: 1]
defguardp is_nil_or_nonempty_string(term) when is_nil(term) or (is_binary(term) and term != "")
@spec mod_name(Context.t(), [String.t()]) :: String.t()
def mod_name(%Context{} = ctx, ns) when is_list(ns) do
ns = Enum.map(ns, &proto_name_to_module_name/1)
parts =
case camelcase_prefix(ctx) do
"" -> ns
prefix -> [prefix | ns]
end
Enum.join(parts, ".")
end
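# For illustration, with a context that has no prefixes configured:
#   mod_name(ctx, ["helloworld", "Greeter"]) #=> "Helloworld.Greeter"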
defp camelcase_prefix(%{package_prefix: nil, module_prefix: nil, package: nil} = _ctx),
do: ""
defp camelcase_prefix(%{package_prefix: prefix, module_prefix: nil, package: package} = _ctx),
do: proto_name_to_module_name(prepend_package_prefix(prefix, package))
defp camelcase_prefix(%{module_prefix: module_prefix} = _ctx),
do: proto_name_to_module_name(module_prefix)
defp proto_name_to_module_name(name) when is_binary(name) do
name
|> String.split(".")
|> Enum.map_join(".", &Macro.camelize/1)
end
@spec prepend_package_prefix(String.t() | nil, String.t() | nil) :: String.t()
def prepend_package_prefix(prefix, package)
when is_nil_or_nonempty_string(prefix) and is_nil_or_nonempty_string(package) do
[prefix, package]
|> Enum.reject(&is_nil/1)
|> Enum.join(".")
end
@spec options_to_str(%{optional(atom()) => atom() | integer() | String.t()}) :: String.t()
def options_to_str(opts) when is_map(opts) do
opts
|> Enum.reject(fn {_key, val} -> val in [nil, false] end)
|> Enum.map_join(", ", fn {key, val} -> "#{key}: #{print(val)}" end)
end
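# For illustration (hypothetical field options):
#   options_to_str(%{packed: true, deprecated: false, json_name: nil}) #=> "packed: true"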
defp print(atom) when is_atom(atom), do: inspect(atom)
defp print(val), do: val
@spec type_from_type_name(Context.t(), String.t()) :: String.t()
def type_from_type_name(%Context{dep_type_mapping: mapping}, type_name)
when is_binary(type_name) do
# The doc says there's a situation where type_name begins without a `.`, but I never got that.
# Handle that later.
metadata =
mapping[type_name] ||
raise "There's something wrong to get #{type_name}'s type, please contact with the lib author."
metadata[:type_name]
end
@spec descriptor_fun_body(desc :: struct()) :: String.t()
def descriptor_fun_body(%mod{} = desc) do
desc
|> Map.from_struct()
|> Enum.filter(fn {_key, val} -> not is_nil(val) end)
|> mod.new()
|> mod.encode()
|> mod.decode()
|> inspect(limit: :infinity)
end
@spec format(String.t()) :: String.t()
def format(code) when is_binary(code) do
code
|> Code.format_string!(locals_without_parens: @locals_without_parens)
|> IO.iodata_to_binary()
end
end
|
lib/protobuf/protoc/generator/util.ex
| 0.760117
| 0.409309
|
util.ex
|
starcoder
|
defmodule Asteroid.Client do
use AttributeRepository.Resource, otp_app: :asteroid
@moduledoc """
`AttributeRepository.Resource` for clients
Client refers to an OAuth2 client, that is, an **application** (and not a machine). There are 2
types of clients:
- those who can keep a secret secret: *confidential clients* (such as a server). Usually there
is one instance of this application running (even though it has several servers running), so in
this case 1 client = 1 machine
- those who can't: *public clients* (mobile applications, SPAs...). In this case there are
multiple instances of the same client running, used by different subjects
## Field naming
The following fields have standardised meaning:
- `"client_id"`: the client identifier (as in OAuth2) (`String.t()`)
- `"client_name"`: a map whose keys are the language code and the values the localized names
- `"client_uri"`: a map whose keys are the language code and the values the localized URIs
- `"logo_uri"`: a map whose keys are the language code and the values the localized logos
- `"tos_uri"`: a map whose keys are the language code and the values the localized TOS URIs
- `"policy_uri"`: a map whose keys are the language code and the values the localized policy
URIs
- `"client_secret"`: the client secret (`String.t()`). Can be the plain secret, or the
`t:Expwd.Hashed.Portable.t/0`
- `"client_type"`: `"public"` or `"confidential"`, depending on the client's type
- `"grant_types"`: the list of grant types (`t:Asteroid.OAuth2.grant_type_str/0`) that the
client is allowed to use
- `"response_types"`: the list of response types (`t:Asteroid.OAuth2.response_type_str/0`) that
the client is allowed to use
- `"redirect_uris"`: the list of OAuth2 / OpenID Connect redirect URIs (`[String.t()]`)
- `"scope"`: a list of OAuth2 scopes that the client can use when requesting tokens. Scopes
starting with the string `"asteroid."` are special permissions used to access Asteroid
endpoints.
- `"token_endpoint_auth_method"`: a `t:Asteroid.Oauth2.Endpoint.auth_method_str/0`
as specified in RFC7591
- `"contacts"`: list of email addresses (`[String.t()]`)
- `"jwks_uri"`: the JWKs URI of the client (`String.t()`)
- `"jwks"`: the JWKs of the client (a list of JWKs)
- `"software_id"`: the software ID (`String.t()`). Not used by Asteroid
- `"software_version"`: the software version (`String.t()`). Not used by Asteroid
- `"application_type"`: the application type (either `nil`, `"web"` or `"native"`)
- `"sector_identifier_uri"`: the sector identifier URI (`String.t()`)
- `"subject_type"`: the subject type (either `nil`, `"public"` or `"pairwise"`))
- `"id_token_signed_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"id_token_encrypted_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"id_token_encrypted_response_enc"`: refer to the OpenID Connect Client Registration specification
- `"userinfo_signed_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"userinfo_encrypted_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"userinfo_encrypted_response_enc"`: refer to the OpenID Connect Client Registration specification
- `"request_object_signed_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"request_object_encrypted_response_alg"`: refer to the OpenID Connect Client Registration specification
- `"request_object_encrypted_response_enc"`: refer to the OpenID Connect Client Registration specification
- `"default_max_age"`: refer to the OpenID Connect Client Registration specification
- `"require_auth_time"`: refer to the OpenID Connect Client Registration specification
- `"default_acr_values"`: refer to the OpenID Connect Client Registration specification
- `"__asteroid_created_by_client_id"`: the `String.t()` client id of the client that has
initially created this client using the `/register` endpoint (may not have a value if the
client was created by another mean)
- `"__asteroid_oauth2_flow_ropc_issue_refresh_token_init"`: a `boolean()` set to true if a
refresh token is to be issued at the first request of the ROPC flow
- `"__asteroid_oauth2_flow_ropc_issue_refresh_token_refresh"`: a `boolean()` set to true if a
refresh token is to be issued when refresh tokens in the ROPC flow
- `"__asteroid_oauth2_flow_ropc_refresh_token_lifetime"`: a `non_neg_integer()` set to the
lifetime duration of a refresh token in the ROPC flow
- `"__asteroid_oauth2_flow_ropc_access_token_lifetime"`: a `non_neg_integer()` set to the
lifetime duration of an access token in the ROPC flow
- `"__asteroid_oauth2_flow_ropc_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the ROPC flow
- `"__asteroid_oauth2_flow_ropc_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the ROPC flow
- `"__asteroid_oauth2_flow_ropc_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the ROPC flow
- `"__asteroid_oauth2_flow_client_credentials_issue_refresh_token_init"`: a `boolean()` set to
`true` if a refresh token is to be issued at the first request of the client credentials flow
- `"__asteroid_oauth2_flow_client_credentials_issue_refresh_token_refresh"`: a `boolean()` set
to `true` if a refresh token is to be issued when refresh tokens in the client_credentials flow
- `"__asteroid_oauth2_flow_client_credentials_refresh_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a refresh token in the client credentials flow
- `"__asteroid_oauth2_flow_client_credentials_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the client credentials flow
- `"__asteroid_oauth2_flow_client_credentials_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the client credentials
flow
- `"__asteroid_oauth2_flow_client_credentials_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the client credentials flow
- `"__asteroid_oauth2_flow_client_credentials_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the client credentials
flow
- `"__asteroid_oauth2_flow_authorization_code_authorization_code_lifetime"`: a
`non_neg_integer()` set to the lifetime duration of an authorization in the code flow
- `"__asteroid_oauth2_flow_authorization_code_issue_refresh_token_init"`: a `boolean()` set to
true if a refresh token is to be issued in the authorization code flow when presenting the
authorization code
- `"__asteroid_oauth2_flow_authorization_code_issue_refresh_token_refresh"`: a `boolean()` set
to true if a refresh token is to be issued when refreshing tokens in the authorization code flow
- `"__asteroid_oauth2_flow_authorization_code_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the authorization code flow
- `"__asteroid_oauth2_flow_authorization_code_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the authorization code
flow
- `"__asteroid_oauth2_flow_authorization_code_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the authorization code flow
- `"__asteroid_oauth2_flow_authorization_code_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the authorization code
flow
- `"__asteroid_oauth2_flow_authorization_code_refresh_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a refresh token in the authorization code flow
- `"__asteroid_oauth2_flow_implicit_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the implicit flow
- `"__asteroid_oauth2_flow_implicit_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the implicit flow
- `"__asteroid_oauth2_flow_implicit_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the implicit flow
- `"__asteroid_oauth2_flow_implicit_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the implicit flow
- `"__asteroid_endpoint_introspect_claims_resp"`: the list of `String.t()` claims to be
returned from the `"/introspect"` endpoint
- `"__asteroid_oauth2_mandatory_pkce_use"`: a `boolean()` indicating
whether the client shall use PKCE or not (defaults to not being forced to use PKCE)
- `"__asteroid_oauth2_endpoint_register_allowed_token_endpoint_auth_method"`: a list of
`t:Asteroid.OAuth2.Endpoint.auth_method_str/0` that restricts the token endpoint auth methods
that can be assigned to a new client created by this client. If absent or set to `nil`, all
supported methods
(#{Asteroid.Config.link_to_option(:oauth2_endpoint_token_auth_methods_supported_callback)})
can be assigned to new clients
- `"__asteroid_oauth2_endpoint_register_allowed_grant_types"`: a list of
`t:Asteroid.OAuth2.grant_type_str/0` of grant types that can be assigned on newly created
clients on the client registration endpoint. This is opt-in: when not set to a client,
it will not be capable of creating new clients
- `"__asteroid_oauth2_endpoint_register_allowed_response_types"`: a list of
`t:Asteroid.OAuth2.response_type_str/0` of response types that can be assigned to newly created
clients on the client registration endpoint. This is opt-in: when not set to a client,
it will not be capable of creating new clients
- `"__asteroid_oauth2_endpoint_register_allowed_scopes"`: a list of scopes that can be
assigned to newly created clients on the client registration endpoint. If not set, defaults
to the available scopes for the granted flows (determined from the grant types)
- `"__asteroid_oauth2_endpoint_register_auto_scopes"`: a list of scopes that are automatically
assigned to newly created clients, in addition to those requested. The existence of these
automatically granted scopes are *not checked* against the configured scopes, which means
that scopes that are not configured in the configuration files can be granted through this
option
- `"__asteroid_oauth2_endpoint_register_additional_metadata_fields"`: a list of strings
for the additional metadata fields that will be saved upon client creation request
- `"__asteroid_oauth2_endpoint_register_default_token_endpoint_auth_method"`: a
`t:Asteroid.OAuth2.Endpoint.auth_method_str/0` that replaces the specification's default
(`"client_secret_basic"`) for new clients created by this client
- `"__asteroid_oauth2_endpoint_register_default_grant_types"`: a list of
`t:Asteroid.OAuth2.grant_type_str/0` that replaces the specification's default
(`["authorization_code"]`) for new clients created by this client
- `"__asteroid_oauth2_endpoint_register_default_response_types"`: a list of
`t:Asteroid.OAuth2.response_type_str/0` that replaces the specification's default
(`["code"]`) for new clients created by this client
- `"__asteroid_oauth2_flow_device_authorization_device_code_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a device code in the device authorization flow
- `"__asteroid_oauth2_flow_device_authorization_issue_refresh_token_init"`: a `boolean()` set
to true if a refresh token is to be issued at the first request of the device authorization
flow
- `"__asteroid_oauth2_flow_device_authorization_issue_refresh_token_refresh"`: a `boolean()`
set to true if a refresh token is to be issued when refresh tokens in the device authorization
flow
- `"__asteroid_oauth2_flow_device_authorization_refresh_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a refresh token in the device authorization flow
- `"__asteroid_oauth2_flow_device_authorization_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the device authorization flow
- `"__asteroid_oauth2_flow_device_authorization_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the device authorization
flow
- `"__asteroid_oauth2_flow_device_authorization_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the device authorization
flow
- `"__asteroid_oauth2_flow_device_authorization_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the device
authorization flow
- `"__asteroid_oidc_flow_authorization_code_authorization_code_lifetime"`: a
`non_neg_integer()` set to the lifetime duration of an authorization code in the OIDC code flow
- `"__asteroid_oidc_flow_authorization_code_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the OIDC authorization code flow
- `"__asteroid_oidc_flow_authorization_code_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the OIDC authorization
code flow
- `"__asteroid_oidc_flow_authorization_code_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the OIDC authorization
code flow
- `"__asteroid_oidc_flow_authorization_code_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the OIDC authorization
code flow
- `"__asteroid_oidc_flow_authorization_code_issue_refresh_token_init"`: a `boolean()` set to
true if a refresh token is to be issued in the OIDC authorization code flow when presenting the
authorization code
- `"__asteroid_oidc_flow_authorization_code_issue_refresh_token_refresh"`: a `boolean()` set
to true if a refresh token is to be issued when refreshing tokens in the OIDC authorization
code flow
- `"__asteroid_oidc_flow_hybrid_issue_refresh_token_init"`: a `boolean()` set to
true if a refresh token is to be issued in the OIDC hybrid flow when presenting the
authorization code
- `"__asteroid_oidc_flow_hybrid_issue_refresh_token_refresh"`: a `boolean()` set
to true if a refresh token is to be issued when refreshing tokens in the OIDC hybrid flow
- `"__asteroid_oidc_flow_authorization_code_refresh_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a refresh token in the OIDC authorization code flow
- `"__asteroid_oidc_flow_hybrid_refresh_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of a refresh token in the OIDC hybrid flow
- `"__asteroid_oidc_flow_authorization_code_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the OIDC authorization code flow
- `"__asteroid_oidc_flow_implicit_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the OIDC implicit flow
- `"__asteroid_oidc_flow_hybrid_access_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an access token in the OIDC hybrid flow
- `"__asteroid_oidc_flow_authorization_code_id_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an ID token in the OIDC authorization code flow
- `"__asteroid_oidc_flow_implicit_id_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an ID token in the OIDC implicit flow
- `"__asteroid_oidc_flow_hybrid_id_token_lifetime"`: a `non_neg_integer()`
set to the lifetime duration of an ID token in the OIDC hybrid flow
- `"__asteroid_oidc_flow_authorization_code_issue_id_token_refresh"`: a `boolean()` set
to true if an ID token is to be issued when refreshing tokens in the OIDC authorization code
flow
- `"__asteroid_oidc_flow_hybrid_issue_id_token_refresh"`: a `boolean()` set
to true if an ID token is to be issued when refreshing tokens in the OIDC hybrid flow
- `"__asteroid_oidc_flow_hybrid_authorization_code_lifetime"`: a
`non_neg_integer()` set to the lifetime duration of an authorization code in the OIDC code flow
- `"__asteroid_oidc_flow_implicit_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the OIDC implicit flow
- `"__asteroid_oidc_flow_hybrid_access_token_serialization_format"`: the
`t:Asteroid.Token.serialization_format_str/0` serialization format for the OIDC hybrid flow
- `"__asteroid_oidc_flow_implicit_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the OIDC implicit flow
- `"__asteroid_oidc_flow_hybrid_access_token_signing_key"`: the
`t:Asteroid.Crypto.Key.name/0` signing key name for access tokens in the OIDC hybrid flow
- `"__asteroid_oidc_flow_implicit_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the OIDC implicit flow
- `"__asteroid_oidc_flow_hybrid_access_token_signing_alg"`: the
`t:Asteroid.Crypto.Key.jws_alg/0` signing algorithm for access tokens in the OIDC hybrid flow
## Configuration
This module uses the default configuration of `AttributeRepository.Resource` (see `config/1`).
## Security considerations
- When using client secrets, make sure to 1) generate them randomly and 2) store them
in an appropriate manner. See the [expwd](https://github.com/tanguilp/expwd) library.
- You **SHOULD NOT** issue client secrets to public clients
[RFC6749 - 10.1. Client Authentication](https://tools.ietf.org/html/rfc6749#section-10.1)
"""
@doc """
Returns the JWKs of a client
Note that the `"jwks_uri"` field takes precedence over the `"jwks"` field. If `"jwks"` is
somehow unreachable, it does **not** fallback to the `"jwks"` field but returns an error
instead.
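For instance, a minimal sketch (assuming a client registered with a static `"jwks"`
attribute and no `"jwks_uri"`):

    {:ok, keys} = get_jwks(client)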
"""
@spec get_jwks(t()) :: {:ok, [Asteroid.Crypto.Key.t()]} | {:error, any()}
def get_jwks(client) do
client = fetch_attributes(client, ["jwks", "jwks_uri"])
if client.attrs["jwks_uri"] do
case JWKSURIUpdater.get_keys(client.attrs["jwks_uri"]) do
      {:ok, keys} ->
        {:ok, keys}
{:error, _} = error ->
error
end
else
{:ok, client.attrs["jwks"] || []}
end
end
end
|
lib/asteroid/client.ex
| 0.90136
| 0.555375
|
client.ex
|
starcoder
|
defmodule Advent20.OperationOrder do
@moduledoc """
Day 18: Operation Order
Phew this day was hard. I am not used to parsing. I read up on algorithms for parsing math expressions
and read about the difference between infix and postfix notation. Infix is e.g. 2+4*(3+4) and the same
in postfix would be written as 2434+*+ which at first seems really weird.
However! The postfix notation is really easy to evaluate on a computer. It can be done using only a single
stack and a recursive function (as done in this module at the bottom in `evaluate_postfix/1`).
So. This D18 consists of an infix to postfix converter which takes custom operator precedences for only the
+ and * operators. It converts all input equations to postfix using these precedences and then evaluates
the postfix equations.
The D18 puzzle is actually our normal math, but with custom operator precedences.
In part 1 the precedences are the same for + and * (I have set both to 1).
In part 2 the + operator takes precedence over *, so the precedence is 2 for + and 1 for *.
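A quick worked example of the two schemes (checked by hand against the rules above):

    # part 1: equal precedence means plain left-to-right evaluation: (2 * 3) + 4
    Advent20.OperationOrder.calculate("2 * 3 + 4", %{*: 1, +: 1}) #=> 10
    # part 2: + binds tighter, so the sum happens first: 2 * (3 + 4)
    Advent20.OperationOrder.calculate("2 * 3 + 4", %{*: 1, +: 2}) #=> 14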
"""
def parse(input) do
input
|> String.split("\n", trim: true)
|> Enum.map(fn string -> "(#{string})" end)
end
def part_1(input) do
calculate(input, %{*: 1, +: 1})
end
def part_2(input) do
calculate(input, %{*: 1, +: 2})
end
def calculate(input, operator_precedences) do
input
|> parse()
|> Enum.map(&tokenize/1)
|> Enum.map(&to_postfix(&1, operator_precedences))
|> Enum.map(&evaluate_postfix/1)
|> Enum.sum()
end
def tokenize(string) do
string
|> String.codepoints()
|> Enum.reject(&(&1 == " "))
|> Enum.map(fn
"+" -> :+
"*" -> :*
"(" -> :start_parens
")" -> :end_parens
char -> String.to_integer(char)
end)
end
def to_postfix(tokens, precedence) do
do_parse_rpe(tokens, [], [], precedence)
end
# Handle a number by pushing it into postfix
defp do_parse_rpe([number | equation], postfix, opstack, precedence) when is_integer(number) do
do_parse_rpe(equation, [number | postfix], opstack, precedence)
end
# Encountering a start parenthesis, just push it to the opstack
defp do_parse_rpe([:start_parens | equation], postfix, opstack, precedence) do
do_parse_rpe(equation, postfix, [:start_parens | opstack], precedence)
end
# End parenthesis, pop from the opstack onto the postfix until the start parens is encountered
# Neither start or end parens are added to the postfix
defp do_parse_rpe([:end_parens | equation], postfix, opstack, precedence) do
    {remaining_parens_ops, [:start_parens | new_opstack]} = Enum.split_while(opstack, fn x -> x != :start_parens end)
    do_parse_rpe(equation, Enum.reverse(remaining_parens_ops) ++ postfix, new_opstack, precedence)
end
# Handle operations with + or * -->
# If the first element in the opstack is a parens, just push the operator to the opstack
defp do_parse_rpe([operator | equation], postfix, [:start_parens | _] = opstack, precedence)
when operator in [:*, :+] do
do_parse_rpe(equation, postfix, [operator | opstack], precedence)
end
  # If the operator is * or + and we don't have a parens first in the opstack
defp do_parse_rpe([operator | equation], postfix, opstack, precedence) when operator in [:*, :+] do
operator_precedence = Map.fetch!(precedence, operator)
first_opstack_element_precedence = Map.fetch!(precedence, List.first(opstack))
    # If the operator precedence is strictly greater than that of the next operator in the opstack, push it onto the opstack
    # Otherwise (equal or lower precedence, i.e. left-associative), pop the opstack operator into the postfix and push the operator onto the opstack
if operator_precedence > first_opstack_element_precedence do
do_parse_rpe(equation, postfix, [operator | opstack], precedence)
else
[opstack_operator | opstack_rest] = opstack
do_parse_rpe(equation, [opstack_operator | postfix], [operator | opstack_rest], precedence)
end
end
# We are finished!
defp do_parse_rpe([], postfix, [], _), do: postfix |> Enum.reverse()
defp evaluate_postfix(postfix) do
do_evaluate_postfix(postfix, [])
end
defp do_evaluate_postfix([number | tail], number_stack) when is_integer(number) do
do_evaluate_postfix(tail, [number | number_stack])
end
defp do_evaluate_postfix([:* | tail], [number1, number2 | number_stack_tail]) do
do_evaluate_postfix(tail, [number1 * number2 | number_stack_tail])
end
defp do_evaluate_postfix([:+ | tail], [number1, number2 | number_stack_tail]) do
do_evaluate_postfix(tail, [number1 + number2 | number_stack_tail])
end
defp do_evaluate_postfix([], [number]), do: number
end
|
lib/advent20/18_operation_order.ex
| 0.786213
| 0.663778
|
18_operation_order.ex
|
starcoder
|
defmodule EpiContacts.PatientCase do
alias EpiContacts.Contact
alias EpiContacts.Parsers
@moduledoc """
Utilities for working with patient case map data.
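A "patient case" here is a plain map with top-level keys such as `"case_id"` and
`"domain"` and a `"properties"` sub-map, e.g. (hypothetical values):

    %{"case_id" => "abc-123", "domain" => "ny-state", "properties" => %{"dob" => "1990-01-02"}}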
"""
@minimum_age 18
@spec age(patient_case :: map()) :: pos_integer() | nil
@spec age(patient_case :: map(), as_of :: DateTime.t()) :: pos_integer() | nil
def age(patient_case, as_of \\ Timex.local()) do
case date_of_birth(patient_case) do
nil -> nil
dob -> do_age(dob, as_of)
end
end
defp do_age(dob, as_of) do
age = as_of.year - dob.year
if as_of.month < dob.month || (as_of.month == dob.month && as_of.day < dob.day),
do: age - 1,
else: age
end
def date_of_birth(patient_case),
do: patient_case |> property("dob") |> parse_date()
def properties(patient_case),
do: Map.get(patient_case, "properties", %{})
def property(patient_case, property, default \\ nil),
do: patient_case |> properties() |> Map.get(property, default)
def parse_date(nil), do: nil
def parse_date(date_string) do
case Date.from_iso8601(date_string) do
{:ok, date} -> date
{:error, _} -> nil
end
end
def domain(%{"domain" => domain}), do: domain
def domain(_), do: nil
def case_id(%{"case_id" => case_id}), do: case_id
def case_id(_), do: nil
def child_cases(%{"child_cases" => child_cases}), do: Map.values(child_cases)
def current_status(patient_case),
do: property(patient_case, "current_status")
def owner_id(patient_case),
do: property(patient_case, "owner_id")
def full_name(patient_case),
do: property(patient_case, "full_name")
def first_name(patient_case),
do: property(patient_case, "first_name")
def last_name(patient_case),
do: property(patient_case, "last_name")
def phone(patient_case),
do: property(patient_case, "phone_home")
def has_date_of_birth?(patient_case) do
!!date_of_birth(patient_case)
end
def has_phone_number?(patient_case) do
patient_case
|> property("has_phone_number")
|> is_yes?()
end
def initials(patient_case),
do: property(patient_case, "initials")
@secure_id_property "smc_id"
def secure_id(patient_case),
do: property(patient_case, @secure_id_property)
def secure_id_property, do: @secure_id_property
def smc_opt_in?(patient_case),
do: property(patient_case, "smc_opt_in") |> is_yes?()
def smc_trigger_reason(patient_case),
do: property(patient_case, "smc_trigger_reason")
def investigation?(patient_case),
do: property(patient_case, "investigation")
def investigation_name(patient_case),
do: property(patient_case, "investigation_name")
def investigation_case_id(patient_case),
do: property(patient_case, "investigation_case_id")
def investigation_id(patient_case),
do: property(patient_case, "investigation_id")
def isolation_start_date(patient_case),
do: patient_case |> property("isolation_start_date") |> parse_date()
@isolation_span 5
def isolation_end_date(patient_case),
do:
(isolation_start_date(patient_case) || new_lab_result_specimen_collection_date(patient_case))
|> Timex.shift(days: @isolation_span)
def release_from_isolation_date(patient_case, opts \\ []) do
shift = opts[:shift_days] || 1
patient_case |> isolation_end_date() |> Timex.shift(days: shift)
end
def new_lab_result_specimen_collection_date(patient_case),
do: patient_case |> property("new_lab_result_specimen_collection_date") |> parse_date()
def start_of_infectious_period(patient_case),
do:
(isolation_start_date(patient_case) || new_lab_result_specimen_collection_date(patient_case))
|> Timex.shift(days: -2)
def end_of_infectious_period(patient_case),
do: patient_case |> start_of_infectious_period() |> Timex.shift(days: @isolation_span)
def infectious_period(patient_case),
do: Date.range(start_of_infectious_period(patient_case), end_of_infectious_period(patient_case))
def case_type(commcare_case), do: commcare_case |> property("case_type") |> to_string() |> String.downcase()
def is_contact?(commcare_case), do: case_type(commcare_case) == "contact"
def is_patient?(commcare_case), do: case_type(commcare_case) == "patient"
def is_minor?(patient_case, as_of \\ Timex.local()),
do: age(patient_case, as_of) < @minimum_age
def minimum_age, do: @minimum_age
def patient_type(commcare_case), do: commcare_case |> property("patient_type") |> to_string() |> String.downcase()
defp generate_random(n),
do: Enum.map(0..(n - 1), fn _ -> [?0..?9] |> Enum.concat() |> Enum.random() end)
def generate_contact_id(patient_case) do
case doh_mpi_id(patient_case) do
nil -> 6 |> generate_random() |> to_string()
doh_mpi -> "#{doh_mpi}-#{generate_random(6)}"
end
end
def doh_mpi_id(patient) do
case property(patient, "doh_mpi_id") do
nil -> nil
"" -> nil
mpi_id -> mpi_id
end
end
def primary_language(patient_case),
do: property(patient_case, "primary_language", "en")
def is_stub?(patient_case),
do: patient_case |> property("stub") |> is_yes?()
def transfer_status(patient_case),
do: property(patient_case, "transfer_status")
def external_id(patient_case) do
with domain when is_binary(domain) <- domain(patient_case),
case_id when is_binary(case_id) <- case_id(patient_case) do
"gid://commcare/domain/#{domain}/case/#{case_id}"
else
_ -> nil
end
end
def existing_contacts(patient_case) do
patient_case
|> child_cases()
|> Enum.filter(&is_contact?/1)
|> Enum.map(fn contact ->
%Contact{
first_name: first_name(contact),
last_name: last_name(contact),
phone: phone(contact)
}
end)
end
def days_between_open_and_modified(patient_case) do
with date_opened when not is_nil(date_opened) <-
property(patient_case, "date_opened") |> Parsers.datetime_with_or_without_zone(),
server_date_modified when not is_nil(server_date_modified) <-
patient_case["server_date_modified"] |> Parsers.datetime_with_or_without_zone() do
difference = DateTime.diff(server_date_modified, date_opened)
difference / (60 * 60 * 24)
else
_ -> :error
end
end
def interview_attempted_or_completed?(patient_case) do
property(patient_case, "interview_disposition") in [
"invalid_phone_number",
"agreed_to_participate",
"deceased",
"med_psych",
"language_barrier",
"incarcerated",
"already_investigated",
"facility_notification"
]
end
defp is_yes?("yes"), do: true
defp is_yes?(_), do: false
end
|
lib/epi_contacts/patient_case.ex
| 0.581184
| 0.414247
|
patient_case.ex
|
starcoder
|
defmodule Authorizir do
@moduledoc ~S"""
Ecto-backed Authorization Library for Elixir Applications
See [README](README.md) for a description of the mathematical model used as
the basis of this system.
## Usage
Imagine you are creating an app that handles online ordering.
First, create your app's authorization module, configuring it with your
application repository:
```elixir
defmodule Auth do
use Authorizir, repo: Repo
end
```
Users of the application might be organized into a hierarchy as follows (note
that an employee can also be a customer):
```mermaid
graph TD
*[Users *] --> E[Employees]
* --> C[Customers]
E --> A[Admins]
E --> M[Marketing]
E --> F[Finance]
E --> S[Shipping and Fulfillment]
A --> Bob
M --> Bob
M --> Jane
F --> Amanda
S --> George
S --> Beth
C --> Amanda
C --> George
C --> John
```
We have two types of Subject entities represented here; "Organizational Units"
represent groups of users such as internal departments and customers, while
"Users" represent the individual system accounts. Each of these are
represented with Ecto schemas in our app, and we include the
`Authorizir.Subject` behavior in the modules, so that they can participate in
the Subject hierarcy.
First we add the necessary migrations by running `mix ecto.gen.migraion
add_org_units_and_users` and editing the resulting migration file:
```elixir
defmodule AddOrgUnitsAndUsers do
  use Ecto.Migration
  import Authorizir.Migrations, only: [apply_subject_hierarchy: 2]
  def change do
    create table("org_units") do
      add :name, :string, null: false
      timestamps()
    end
    apply_subject_hierarchy("org_units", id_field: :id)
    create table("users") do
      add :name, :string, null: false
      timestamps()
    end
    apply_subject_hierarchy("users", id_field: :id)
  end
end
```
```elixir
defmodule OrgUnit do
use Ecto.Schema
use Authorizir.Subject
schema "org_units" do
field :name, :string
end
end
defmodule User do
use Ecto.Schema
use Authorizir.Subject
schema "org_units" do
field :name, :string
end
end
```
You can create the hierarchy as:
```elixir
{:ok, employees} = %OrgUnit{name: "Employees"} |> Repo.insert()
{:ok, customers} = %OrgUnit{name: "Customers"} |> Repo.insert()
{:ok, admins} = %OrgUnit{name: "Admins"} |> Repo.insert()
:ok = Auth.add_child(employees.id, admins.id, Subject)
{:ok, marketing} = %OrgUnit{name: "Marketing"} |> Repo.insert()
:ok = Auth.add_child(employees.id, marketing.id, Subject)
{:ok, finance} = %OrgUnit{name: "Finance"} |> Repo.insert()
:ok = Auth.add_child(employees.id, finance.id, Subject)
{:ok, shipping} = %OrgUnit{name: "Shipping and Fulfillment"} |> Repo.insert()
:ok = Auth.add_child(employees.id, shipping.id, Subject)
{:ok, bob} = %User{name: "Bob"} |> Repo.insert()
:ok = Auth.add_child(admins.id, bob.id, Subject)
:ok = Auth.add_child(marketing.id, bob.id, Subject)
{:ok, jane} = %User{name: "Jane"} |> Repo.insert()
:ok = Auth.add_child(marketing.id, jane.id, Subject)
{:ok, amanda} = %User{name: "Amanda"} |> Repo.insert()
:ok = Auth.add_child(finance.id, amanda.id, Subject)
:ok = Auth.add_child(customers.id, amanda.id, Subject)
{:ok, george} = %User{name: "George"} |> Repo.insert()
:ok = Auth.add_child(shipping.id, george.id, Subject)
:ok = Auth.add_child(customers.id, george.id, Subject)
{:ok, beth} = %User{name: "Beth"} |> Repo.insert()
:ok = Auth.add_child(shipping.id, beth.id, Subject)
{:ok, john} = %User{name: "John"} |> Repo.insert()
:ok = Auth.add_child(customers.id, john.id, Subject)
```
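With the hierarchy in place, permissions can be granted and checked through the same
module. A minimal sketch, assuming an `"orders"` Object and a `"read"` Permission have
been registered elsewhere (their registration is not shown here):
```elixir
:ok = Auth.grant_permission(bob.id, "orders", "read")
:granted = Auth.permission_granted?(bob.id, "orders", "read")
```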
"""
alias Authorizir.{AuthorizationRule, Object, Permission, Subject}
import Authorizir.ErrorHelpers, only: [errors_on: 2]
import Ecto.Query, only: [from: 2]
@callback register_subject(id :: binary(), description :: String.t()) ::
:ok | {:error, reason :: atom()}
def register_subject(repo, id, description) do
case Subject.new(id, description) |> repo.insert() do
{:ok, _subject} ->
:ok
{:error, changeset} ->
cond do
"can't be blank" in errors_on(changeset, :ext_id) ->
{:error, :id_is_required}
"can't be blank" in errors_on(changeset, :description) ->
{:error, :description_is_required}
true ->
raise "Unanticipated error while adding Subject: #{inspect(changeset)}"
end
end
end
@callback grant_permission(
subject_id :: binary(),
object_id :: binary(),
permission_id :: binary()
) :: :ok | {:error, reason :: atom()}
def grant_permission(repo, subject_id, object_id, permission_id) do
create_rule(repo, subject_id, object_id, permission_id, :+)
end
@callback revoke_permission(
subject_id :: binary(),
object_id :: binary(),
permission_id :: binary()
) :: :ok | {:error, reason :: atom()}
def revoke_permission(repo, subject_id, object_id, permission_id) do
delete_rule(repo, subject_id, object_id, permission_id, :+)
end
@callback deny_permission(
subject_id :: binary(),
object_id :: binary(),
permission_id :: binary()
) :: :ok | {:error, reason :: atom()}
def deny_permission(repo, subject_id, object_id, permission_id) do
create_rule(repo, subject_id, object_id, permission_id, :-)
end
@callback allow_permission(
subject_id :: binary(),
object_id :: binary(),
permission_id :: binary()
) :: :ok | {:error, reason :: atom()}
def allow_permission(repo, subject_id, object_id, permission_id) do
delete_rule(repo, subject_id, object_id, permission_id, :-)
end
@callback add_child(parent_id :: binary(), child_id :: binary(), type :: module()) ::
:ok | {:error, reason :: atom()}
def add_child(repo, parent_id, child_id, type) do
with {:parent, parent} when not is_nil(parent) <-
{:parent, repo.get_by(type, ext_id: parent_id)},
{:child, child} when not is_nil(child) <- {:child, repo.get_by(type, ext_id: child_id)},
{:edge_created, _edge} <- type.create_edge(parent, child) |> repo.dagex_update() do
:ok
else
{:parent, nil} -> {:error, :invalid_parent}
      {:child, nil} -> {:error, :invalid_child}
{:error, _reason} = error -> error
end
end
@callback remove_child(parent_id :: binary(), child_id :: binary(), type :: module()) ::
:ok | {:error, reason :: atom()}
def remove_child(repo, parent_id, child_id, type) do
with {:parent, parent} when not is_nil(parent) <-
{:parent, repo.get_by(type, ext_id: parent_id)},
{:child, child} when not is_nil(child) <- {:child, repo.get_by(type, ext_id: child_id)},
{:edge_removed, _edge} <- type.remove_edge(parent, child) |> repo.dagex_update() do
:ok
else
{:parent, nil} -> {:error, :invalid_parent}
      {:child, nil} -> {:error, :invalid_child}
{:error, _reason} = error -> error
end
end
@callback permission_granted?(
subject_id :: binary(),
object_id :: binary(),
permission_id :: binary()
) :: :granted | :denied | {:error, reason :: atom()}
def permission_granted?(repo, subject_id, object_id, permission_id) do
with {:sop, {:ok, subject, object, permission}} <-
{:sop, sop_nodes(repo, subject_id, object_id, permission_id)} do
cond do
authorization_rule_applies?(repo, subject, object, permission, :-) -> :denied
authorization_rule_applies?(repo, subject, object, permission, :+) -> :granted
true -> :denied
end
else
{:sop, error} -> error
end
end
defp authorization_rule_applies?(repo, subject, object, permission, :-) do
from([r, s, o] in authorization_rules_for(subject, object),
join: p in subquery(Permission.with_descendants(permission)),
on: p.id == r.permission_id,
where: r.rule_type == :-
)
|> repo.exists?()
end
defp authorization_rule_applies?(repo, subject, object, permission, :+) do
from([r, s, o] in authorization_rules_for(subject, object),
join: p in subquery(Permission.with_ancestors(permission)),
on: p.id == r.permission_id,
where: r.rule_type == :+
)
|> repo.exists?()
end
defp authorization_rules_for(subject, object) do
from(r in AuthorizationRule,
join: s in subquery(Subject.with_ancestors(subject)),
on: s.id == r.subject_id,
join: o in subquery(Object.with_ancestors(object)),
on: o.id == r.object_id
)
end
defp sop_ids(repo, subject_ext_id, object_ext_id, permission_ext_id) do
case sop_nodes(repo, subject_ext_id, object_ext_id, permission_ext_id) do
{:ok, subject, object, permission} -> {:ok, subject.id, object.id, permission.id}
result -> result
end
end
defp sop_nodes(repo, subject_ext_id, object_ext_id, permission_ext_id) do
with {:subject, %{} = subject} <-
{:subject, repo.get_by(Subject, ext_id: subject_ext_id)},
{:object, %{} = object} <-
{:object, repo.get_by(Object, ext_id: object_ext_id)},
{:permission, %{} = permission} <-
{:permission, repo.get_by(Permission, ext_id: permission_ext_id)} do
{:ok, subject, object, permission}
else
{participant, nil} -> {:error, "invalid_#{participant}" |> String.to_atom()}
end
end
defp create_rule(repo, subject_id, object_id, permission_id, rule_type) do
with {:sop, {:ok, subject_id, object_id, permission_id}} <-
{:sop, sop_ids(repo, subject_id, object_id, permission_id)},
{:existing_rule, nil} <-
{:existing_rule,
repo.get_by(AuthorizationRule,
subject_id: subject_id,
object_id: object_id,
permission_id: permission_id
)} do
case AuthorizationRule.new(subject_id, object_id, permission_id, rule_type)
|> repo.insert() do
{:ok, _rule} ->
:ok
        {:error, changeset} ->
          raise "Unanticipated error occurred while creating Authorization Rule. #{inspect(changeset)}"
end
else
{:sop, error} -> error
{:existing_rule, %{rule_type: ^rule_type}} -> :ok
{:existing_rule, _rule} -> {:error, :conflicting_rule_type}
end
end
defp delete_rule(repo, subject_id, object_id, permission_id, rule_type) do
with {:sop, {:ok, subject_id, object_id, permission_id}} <-
{:sop, sop_ids(repo, subject_id, object_id, permission_id)} do
from(r in AuthorizationRule,
where:
r.subject_id == ^subject_id and r.object_id == ^object_id and
r.permission_id == ^permission_id and r.rule_type == ^rule_type
)
|> repo.delete_all()
:ok
else
{:sop, error} -> error
end
end
defmacro __using__(opts) do
repo = Keyword.fetch!(opts, :repo)
quote bind_quoted: [repo: repo] do
@authorizir_repo repo
@behaviour Authorizir
@impl Authorizir
def grant_permission(subject_id, object_id, permission_id),
do: Authorizir.grant_permission(@authorizir_repo, subject_id, object_id, permission_id)
@impl Authorizir
def revoke_permission(subject_id, object_id, permission_id),
do: Authorizir.revoke_permission(@authorizir_repo, subject_id, object_id, permission_id)
@impl Authorizir
def deny_permission(subject_id, object_id, permission_id),
do: Authorizir.deny_permission(@authorizir_repo, subject_id, object_id, permission_id)
@impl Authorizir
def allow_permission(subject_id, object_id, permission_id),
do: Authorizir.allow_permission(@authorizir_repo, subject_id, object_id, permission_id)
@impl Authorizir
def permission_granted?(subject_id, object_id, permission_id),
do: Authorizir.permission_granted?(@authorizir_repo, subject_id, object_id, permission_id)
@impl Authorizir
def add_child(parent_id, child_id, type),
do: Authorizir.add_child(@authorizir_repo, parent_id, child_id, type)
@impl Authorizir
def remove_child(parent_id, child_id, type),
do: Authorizir.remove_child(@authorizir_repo, parent_id, child_id, type)
@impl Authorizir
def register_subject(id, description),
do: Authorizir.register_subject(@authorizir_repo, id, description)
end
end
end
|
lib/authorizir.ex
| 0.810591
| 0.799442
|
authorizir.ex
|
starcoder
|
defmodule Versioning.Changelog do
@moduledoc """
Creates changelogs for schemas.
The changelog is composed of a list of maps that describe the history of
the schema. For example:
[
%{
version: "1.1.0",
changes: [
%{type: Foo, descriptions: ["Changed this.", "Changed that."]}
]
},
%{
version: "1.0.0",
changes: [
%{type: Foo, descriptions: ["Changed this.", "Changed that."]}
]
}
]
The descriptions associated with each change can be attributed via the
`@desc` module attribute on a change module. Please see `Versioning.Change`
for more details.
Formatters can be used to turn the raw changelog into different formats. Please
see the `Versioning.Changelog.Formatter` behaviour for more details.
The `Versioning.Changelog.Markdown` formatter is included with this package.
"""
@type change :: %{type: module(), descriptions: [binary()]}
@type version :: %{version: binary(), changes: [change()]}
@type t :: [version()]
@doc """
Builds a changelog of the schema.
## Options
* `:version` - A specific version within the changelog.
* `:type` - A specific type within the specified version.
* `:formatter` - A module that adheres to the `Versioning.Changelog.Formatter`
behaviour.
## Examples
Versioning.Changelog.build(MySchema, formatter: Versioning.Changelog.Markdown)
"""
  @spec build(Versioning.Schema.t(), keyword()) :: Versioning.Changelog.t()
def build(schema, opts \\ []) do
version = Keyword.get(opts, :version)
type = Keyword.get(opts, :type)
formatter = Keyword.get(opts, :formatter)
schema
|> do_build()
|> do_fetch(version, type)
|> do_format(formatter)
end
defp do_build(schema) do
Enum.reduce(schema.__schema__(:down), [], fn {_version, raw_version, types}, changelog ->
add_version(changelog, raw_version, types)
end)
end
defp add_version(changelog, raw_version, types) do
changelog ++ [%{version: raw_version, changes: build_changes(types)}]
end
defp build_changes(types) do
Enum.reduce(types, [], fn {type, changes}, result ->
add_change(result, type, changes)
end)
end
defp add_change(current, type, changes) do
current ++ [%{type: type, descriptions: build_descriptions(changes)}]
end
defp build_descriptions(changes) do
Enum.reduce(changes, [], fn {change, _init}, descriptions ->
descriptions ++ [change.__change__(:desc)]
end)
end
defp do_fetch(changelog, nil, nil) do
changelog
end
defp do_fetch(_changelog, nil, type) when is_atom(type) do
raise Versioning.ChangelogError, """
cannot fetch a changelog type without a version.
type: #{inspect(type)}
"""
end
defp do_fetch(changelog, version, nil) do
do_get_version(changelog, version)
end
defp do_fetch(changelog, version, type) do
changelog
|> do_get_version(version)
|> do_get_version_type(type)
end
defp do_get_version(changelog, version) do
Enum.find(changelog, &(Map.get(&1, :version) == to_string(version))) ||
invalid_version!(version)
end
defp do_get_version_type(version, type) do
version
|> Map.get(:changes)
|> Enum.find(&(Map.get(&1, :type) == type))
end
defp do_format(changelog, nil) do
changelog
end
defp do_format(changelog, formatter) do
formatter.format(changelog)
end
defp invalid_version!(version) do
raise Versioning.ChangelogError, """
version cannot be found in schema.
version: #{inspect(version)}
"""
end
end
|
lib/versioning/changelog.ex
| 0.875734
| 0.503174
|
changelog.ex
|
starcoder
|
defmodule Alchemetrics do
alias Alchemetrics.Event
alias Alchemetrics.Producer
@moduledoc """
Data Report Interface
All reported values follow the same flow:
1. They are stored in a dataset and stay there for a configurable time span;
2. After that time span the dataset is measured. Various measurements are made on the dataset;
3. The measurement results are sent to some `Alchemetrics.CustomBackend`;
4. The dataset is reset.
This document provides a detailed explanation about each one of those steps.
## Value Report
All collected values are stored in datasets. A reported value is identified by a name or by some metadata, which determines the dataset in which the value is stored; values with the same name or metadata therefore end up in the same dataset.
If a collected value cannot be stored in any existing dataset, a new one is created.
Each value accumulated in the dataset will stay there for a configurable time interval. At the end of this interval, the dataset will be measured and reset.
## Data Set Measurement
Measuring a dataset means performing certain calculations on the values accumulated in it. The measurement types include the average, percentiles, and the sum of these values. Each measurement generates a different value, which is sent to the `Alchemetrics.CustomBackend` configured in the application. After that, the dataset is reset.
When reporting a value through the `report/2` function, the following measurements will be made:
- `:p99`: The 99th percentile of the dataset.
- `:p95`: The 95th percentile of the dataset.
- `:avg`: The average of the dataset.
- `:min`: The minimum value at the dataset.
- `:max`: The maximum value at the dataset.
- `:last_interval`: The sum of all dataset values on the last time interval.
- `:total`: The total sum of the dataset since the application boot.
### Report Examples:
```elixir
# Making two reports for the same dataset in the same time interval
Alchemetrics.report(1, response_time_on: %{controller: UsersController, action: :info})
Alchemetrics.report(99, response_time_on: %{controller: UsersController, action: :info})
# Each measurement is made in the dataset and the result is sent to the backends
# In this example the backend only prints the results of the measurements in the console
%{datapoint: :max, response_time_on: %{action: :info, controller: UsersController}, value: 99}
%{datapoint: :min, response_time_on: %{action: :info, controller: UsersController}, value: 1}
%{datapoint: :avg, response_time_on: %{action: :info, controller: UsersController}, value: 50}
%{datapoint: :p95, response_time_on: %{action: :info, controller: UsersController}, value: 99}
%{datapoint: :p99, response_time_on: %{action: :info, controller: UsersController}, value: 99}
%{datapoint: :last_interval, response_time_on: %{action: :info, controller: UsersController}, value: 100}
%{datapoint: :total, response_time_on: %{action: :info, controller: UsersController}, value: 100}
```
When reporting a value through the `increment/1` or `increment_by/2` functions, the following measurements will be applied:
- `:last_interval`: The sum of all dataset values on the last time interval.
- `:total`: The total sum of the dataset since the application boot.
### Increment Examples:
```elixir
# Collecting 3 requests within the same time range:
Alchemetrics.increment(requests_on: %{controller: UsersController, action: :info})
Alchemetrics.increment(requests_on: %{controller: UsersController, action: :info})
Alchemetrics.increment(requests_on: %{controller: UsersController, action: :info})
# The dataset is measured and the value is sent to the backend.
# In this example the backend only prints the results of the measurements in the console
# After that, the dataset is reset.
# The ConsoleBackend will print each one of the measurements
# Printing :last_interval
%{datapoint: :last_interval, requests_on: %{action: :info, controller: UsersController}, value: 3}
# Printing :total
%{datapoint: :total, requests_on: %{action: :info, controller: UsersController}, value: 3}
# No increments were made on the last interval.
# So, when the measurement is made, the last interval sum will be zero, but the total is kept at 3:
%{datapoint: :last_interval, requests_on: %{action: :info, controller: UsersController}, value: 0}
%{datapoint: :total, requests_on: %{action: :info, controller: UsersController}, value: 3}
```
"""
@doc """
Reports a specific value or the execution time of a function.
The following measures will be applied:
- `:p99`: The 99th percentile of the dataset.
- `:p95`: The 95th percentile of the dataset.
- `:avg`: The average of the dataset.
- `:min`: The minimum value at the dataset.
- `:max`: The maximum value at the dataset.
- `:last_interval`: Like in the Increment Data Set, the sum on the last interval is also available here.
- `:total`: Like in the Increment Data Set, the total sum since the first report is also available here.
## Reporting a specific value
### Params:
- `value`: The collected value. Can be any integer
- `name`: Identifies the dataset where this value should be stored. Can be an `atom` or a `KeywordList`.
### Usage:
Reports are useful to report generic values like a response time for a given route. Therefore, you could create a Plug that reports the response time of a certain route:
```elixir
defmodule MyApp.Plugs.RequestMeasurer do
@behaviour Plug
def init(opts \\ []), do: opts
def call(conn, opts) do
start = System.monotonic_time()
Plug.Conn.register_before_send(conn, fn conn ->
      diff = System.convert_time_unit(System.monotonic_time() - start, :native, :microsecond)
Alchemetrics.report(diff, request_time: %{
method: conn.method,
path: conn.request_path,
status: conn.status
})
conn
end)
end
end
# You can track any request by pluging it at my_app_web/endpoint.ex
defmodule MyApp.Endpoint do
use Phoenix.Endpoint, otp_app: :my_app
plug MyApp.Plugs.RequestMeasurer
...
end
```
## Reporting a function execution time
### Params:
- `name`: Identifies the dataset where this value should be stored. Can be an `atom` or a `KeywordList`.
- `function`: Behaviour that will be executed and measured. Can be any function
### Return:
The return of the function after being executed.
### Usage:
```elixir
metric_name = [
processing_time: %{
processor: MyApp.MatrixOperator,
operation: "multiplication",
}
]
Alchemetrics.report(metric_name, fn ->
MyApp.MatrixOperator.multiply(matrix1, matrix2)
end)
```
"""
def report(metadata, function) when is_function(function) do
{execution_time_in_microseconds, returned_value} = :timer.tc(function)
Alchemetrics.report(execution_time_in_microseconds, metadata)
returned_value
end
def report(value, name) when is_atom(name), do: report(value, [name: name])
def report(value, metadata) do
create_event(value, metadata, Alchemetrics.Exometer.Datapoints.histogram)
|> Producer.enqueue
end
@doc """
Similar to `increment/1`, but accepts an arbitrary increment value.
The following measures will be applied:
- `:last_interval`: The sum of all dataset values on the last time interval.
- `:total`: The total sum of the dataset since the application boot.
## Params:
- `value`: The value to be collected. Can be any integer.
- `name`: Identifies the dataset where this value should be stored. Can be an `atom` or a `KeywordList`.
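## Usage:
A minimal sketch with a hypothetical metric name:
```elixir
# Count bytes sent, several at a time
Alchemetrics.increment_by(512, bytes_sent: %{port: 4000})
```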
"""
def increment_by(value, name) when is_atom(name), do: increment_by(value, [name: name])
def increment_by(value, metadata) do
create_event(value, metadata, Alchemetrics.Exometer.Datapoints.spiral)
|> Producer.enqueue
end
@doc """
Reports the value 1.
The following measures will be applied:
- `:last_interval`: The sum of all dataset values on the last time interval.
- `:total`: The total sum of the dataset since the application boot.
## Params:
- `name`: Identifies the dataset where this value should be stored. Can be an `atom` or a `KeywordList`.
## Usage:
Increments are useful, for example, to count the number of requests on a particular route in a Phoenix application.
```elixir
defmodule MyAppWeb.UsersController do
use MyAppWeb, :controller
plug :count_request
def info(conn, _params), do: json(conn, %{name: "Some User", email: "<EMAIL>"})
def count_request(conn, _) do
Alchemetrics.increment(requests_at: %{
controller: Phoenix.Controller.controller_module(conn),
action: Phoenix.Controller.action_name(conn)
})
conn
end
end
```
"""
def increment(name) when is_atom(name), do: increment_by(1, [name: name])
def increment(metadata), do: increment_by(1, metadata)
defp create_event(value, metadata, datapoints) do
%{}
|> Map.put(:metadata, metadata)
|> Map.put(:datapoints, datapoints)
|> Map.put(:value, value)
|> Event.create
end
end
|
lib/alchemetrics.ex
| 0.942797
| 0.928214
|
alchemetrics.ex
|
starcoder
|
defmodule SimpleMarkdown do
@moduledoc """
Converts markdown into the specified rendered output.
While this step is completely optional, if you need to configure
any rules the first thing to do is usually to generate the base
rule config.
$ mix simple_markdown.rules.new
And then importing the config in your config.exs:
import_config "simple_markdown_rules.exs"
This config can be customized as you see fit. With new rules added,
and other rules removed or modified. Depending on the changes made
this may require some additional steps for things to work the way
you want. e.g. If you add or change the way a rule type fundamentally
works, you'll need to add or override the required rendering step
for that type. Details for this can be found in renderer protocols.
Example
-------
#config/simple_markdown_rules.exs
#add the following rule to our ruleset
lol: %{ match: ~r/\\Alol/, rules: [] }
#lib/lol_renderer.ex
#add a renderer for the HTML renderer for our "lol" rule
defimpl SimpleMarkdown.Renderer.HTML, for: SimpleMarkdown.Attribute.Lol do
def render(_), do: "<img src=\\"lolcat.jpg\\">"
end
#usage:
SimpleMarkdown.convert("#lol") #=> "<h1><img src=\\"lolcat.jpg\\"></h1>"
Additionally new renderers can be created. How these new renderers should
be implemented is left up to you depending on how you'll provide input.
If you use the standard `convert/2` then the input will
be parsed, then the AST will be converted to these structs, and then
that will be passed to your renderer. Alternatively you may call
`SimpleMarkdown.Parser.parse/1` directly and then manipulate that AST
how you see fit, and pass that to your renderer.
"""
@type attribute :: %{ :__struct__ => atom, :input => [attribute | String.t], :option => any }
@doc """
Convert the text input into the rendered output. The default parser
used is the one provided in the rules config, and the default
renderer is the HTML renderer.
"""
@spec convert(String.t, [parser: [Parsey.rule], render: (Stream.t | [SimpleMarkdown.attribute | String.t] -> String.t)]) :: String.t
@spec convert(String.t, [parser: [Parsey.rule], render: (Stream.t | [SimpleMarkdown.attribute | String.t] -> Stream.t)]) :: Stream.t
def convert(input, options \\ []) do
options = Keyword.merge([parser: SimpleMarkdown.Parser.rules, render: &SimpleMarkdown.Renderer.HTML.render/1], options)
SimpleMarkdown.Parser.parse(input, options[:parser]) |> ast_to_structs |> options[:render].()
end
@doc false
@deprecated "Use convert/2 instead"
@spec to_html(String.t, [parser: [Parsey.rule], render: ([SimpleMarkdown.attribute | String.t] -> String.t)]) :: String.t
def to_html(input, options \\ [render: &SimpleMarkdown.Renderer.HTML.render/1]), do: convert(input, options)
@doc """
Convert the AST into structs to allow for the rendering protocols
to be applied to individual attributes.
"""
@spec ast_to_structs([Parsey.ast]) :: [attribute | String.t]
@spec ast_to_structs(Stream.t) :: Stream.t
def ast_to_structs(ast) when is_list(ast), do: Enum.map(ast, &node_to_struct(&1))
def ast_to_structs(ast), do: Stream.map(ast, &node_to_struct(&1))
  @spec node_to_struct(Parsey.ast | String.t) :: attribute | String.t
defp node_to_struct({ name, ast }), do: %{ __struct__: atom_to_module(name), input: ast_to_structs(ast) }
defp node_to_struct({ name, ast, options }), do: %{ __struct__: atom_to_module(name), input: ast_to_structs(ast), option: options }
defp node_to_struct(non_node), do: non_node
@doc false
@spec atom_to_module(atom) :: atom
def atom_to_module(name), do: Module.concat(SimpleMarkdown.Attribute, format_as_module(to_string(name)))
@doc """
Format a string to follow the module naming convention.
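For example, underscored segments are capitalized and dots are preserved:

    iex> SimpleMarkdown.format_as_module("foo_bar.baz")
    "FooBar.Baz"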
"""
@spec format_as_module(String.t) :: String.t
def format_as_module(name) do
name
|> String.split(".")
|> Enum.map(fn module ->
String.split(module, "_") |> Enum.map(&String.capitalize(&1)) |> Enum.join
end)
|> Enum.join(".")
end
@doc """
Create a child module relative to parent.
"""
@spec child_module!(String.t | atom, String.t | atom) :: atom
def child_module!(parent, child), do: Module.safe_concat(parent, format_as_module(to_string(child)))
end
|
lib/simple_markdown.ex
| 0.819244
| 0.403361
|
simple_markdown.ex
|
starcoder
|
defmodule WaitForIt do
@moduledoc ~S"""
WaitForIt provides macros for various ways to wait for things to happen.
Since most Elixir systems are highly concurrent there must be a way to coordinate and synchronize
the processes in the system. While the language provides features (such as `Process.sleep/1` and
`receive`/`after`) that can be used to implement such synchronization, they are inconvenient to
use for this purpose. `WaitForIt` builds on top of these language features to provide convenient
and easy-to-use facilities for synchronizing concurrent activities. While this is likely most
useful for test code in which tests must wait for concurrent or asynchronous activities to
complete, it is also useful in any scenario where concurrent processes must coordinate their
activity. Examples include asynchronous event handling, producer-consumer processes, and
time-based activity.
There are three distinct forms of waiting provided:
1. The `wait` macro waits until a given expression evaluates to a truthy value.
2. The `case_wait` macro waits until a given expression evaluates to a value that
matches any one of the given case clauses (looks like an Elixir `case` expression).
3. The `cond_wait` macro waits until any one of the given expressions evaluates to a truthy
value (looks like an Elixir `cond` expression).
All three forms accept the same set of options to control their behavior:
* `:timeout` - the amount of time to wait (in milliseconds) before giving up
* `:frequency` - the polling frequency (in milliseconds) at which to re-evaluate conditions
* `:signal` - disable polling and use a condition variable of the given name instead
* `:pre_wait` - wait for the given number of milliseconds before evaluating conditions for the first time
The `:signal` option warrants further explanation. By default, all three forms of waiting use
polling to periodically re-evaluate conditions to determine if waiting should continue. The
frequency of polling is controlled by the `:frequency` option. However, if the `:signal` option
is given it disables polling altogether. Instead of periodically re-evaluating conditions at a
particular frequency, a _condition variable_ is used to signal when conditions should be
re-evaluated. It is expected that the `signal` macro will be used to unblock the waiting code
in order to re-evaluate conditions. For example, imagine a typical producer-consumer problem in
which a consumer process waits for items to appear in some buffer while a separate producer
process occasionally place items in the buffer. In this scenario, the consumer process might use
the `wait` macro with the `:signal` option to wait until there are some items in the buffer and
the producer process would use the `signal` macro to tell the consumer that it might be time for
it to check the buffer again.
```
# CONSUMER
# assume the existence of a `buffer_size` function
WaitForIt.wait buffer_size() >= 4, signal: :wait_for_buffer
```
```
# PRODUCER
# put some things in buffer, then:
WaitForIt.signal(:wait_for_buffer)
```
Notice that the same condition variable name `:wait_for_buffer` is used in both cases. It is
important to note that when using condition variables for signaling like this, both the `wait`
invocation and the `signal` invocation should be in the same Elixir module. This is because
`WaitForIt` uses the calling module as a namespace for condition variable names to prevent
accidental name collisions with other registered processes in the application. Also note that
just because a condition variable has been signalled does not necessarily mean that any waiters
on that condition variable can stop waiting. Rather, a signal indicates that waiters should
re-evaluate their waiting conditions to determine if they should continue to wait or not.
"""
alias WaitForIt.Helpers
@doc ~S"""
Wait until the given `expression` evaluates to a truthy value.
Returns `{:ok, value}` or `{:timeout, timeout_milliseconds}`.
## Options
See the WaitForIt module documentation for further discussion of these options.
* `:timeout` - the amount of time to wait (in milliseconds) before giving up
* `:frequency` - the polling frequency (in milliseconds) at which to re-evaluate conditions
* `:signal` - disable polling and use a condition variable of the given name instead
* `:pre_wait` - wait for the given number of milliseconds before evaluating conditions for the first time
## Examples
Wait until the top of the hour:
WaitForIt.wait Time.utc_now.minute == 0, frequency: 60_000, timeout: 60_000 * 60
Wait up to one minute for a particular record to appear in the database:
case WaitForIt.wait Repo.get(Post, 42), frequency: 1000, timeout: 60_000 do
{:ok, data} -> IO.inspect(data)
{:timeout, timeout} -> IO.puts("Gave up after #{timeout} milliseconds")
end
"""
defmacro wait(expression, opts \\ []) do
frequency = Keyword.get(opts, :frequency, 100)
timeout = Keyword.get(opts, :timeout, 5_000)
condition_var = Keyword.get(opts, :signal, nil)
pre_wait = Keyword.get(opts, :pre_wait, 0)
quote do
require WaitForIt.Helpers
Helpers.pre_wait(unquote(pre_wait))
Helpers.wait(
Helpers.make_function(unquote(expression)),
unquote(frequency),
unquote(timeout),
Helpers.localized_name(unquote(condition_var))
)
end
end
@doc ~S"""
Wait until the given `expression` matches one of the case clauses in the given block.
Returns the value of the matching clause, the value of the optional `else` clause,
or a tuple of the form `{:timeout, timeout_milliseconds}`.
The `do` block passed to this macro must be a series of case clauses exactly like a built-in
Elixir `case` expression. Just like a `case` expression, the clauses will attempt to be matched
from top to bottom and the first one that matches will provide the resulting value of the
expression. The difference with `case_wait` is that if none of the clauses initially matches it
will wait and periodically re-evaluate the clauses until one of them does match or a timeout
occurs.
An optional `else` clause may also be used to provide the value in case of a timeout. If an
`else` clause is provided and a timeout occurs, then the `else` clause will be evaluated and
the resulting value of the `else` clause becomes the value of the `case_wait` expression. If no
`else` clause is provided and a timeout occurs, then the value of the `case_wait` expression is a
tuple of the form `{:timeout, timeout_milliseconds}`.
The optional `else` clause may also take the form of match clauses, such as those in a case
expression. In this form, the `else` clause can match on the final value of the expression that
was evaluated before the timeout occurred. See the examples below for an example of this.
## Options
See the WaitForIt module documentation for further discussion of these options.
* `:timeout` - the amount of time to wait (in milliseconds) before giving up
* `:frequency` - the polling frequency (in milliseconds) at which to re-evaluate conditions
* `:signal` - disable polling and use a condition variable of the given name instead
* `:pre_wait` - wait for the given number of milliseconds before evaluating conditions for the first time
## Examples
Wait until queue has at least 5 messages, then return them:
WaitForIt.case_wait Queue.get_messages(queue), timeout: 30_000, frequency: 100 do
messages when length(messages) > 4 -> messages
else
# If after 30 seconds we still don't have 5 messages, just return the messages we do have.
messages -> messages
end
A thermostat that keeps temperature in a small range:
def thermostat(desired_temperature) do
WaitForIt.case_wait get_current_temperature() do
temp when temp > desired_temperature + 2 ->
turn_on_air_conditioning()
temp when temp < desired_temperature - 2 ->
turn_on_heat()
end
thermostat(desired_temperature)
end
Ring the church bells every 15 minutes:
def church_bell_chimes do
count = WaitForIt.case_wait Time.utc_now.minute, frequency: 60_000, timeout: 60_000 * 60 do
15 -> 1
30 -> 2
45 -> 3
0 -> 4
end
IO.puts(String.duplicate(" ding ding ding dong ", count))
church_bell_chimes()
end
"""
defmacro case_wait(expression, opts \\ [], blocks) do
frequency = Keyword.get(opts, :frequency, 100)
timeout = Keyword.get(opts, :timeout, 5_000)
condition_var = Keyword.get(opts, :signal)
do_block = Keyword.get(blocks, :do)
else_block = Keyword.get(blocks, :else)
pre_wait = Keyword.get(opts, :pre_wait, 0)
quote do
require WaitForIt.Helpers
Helpers.pre_wait(unquote(pre_wait))
Helpers.case_wait(
Helpers.make_function(unquote(expression)),
unquote(frequency),
unquote(timeout),
Helpers.localized_name(unquote(condition_var)),
Helpers.make_case_function(unquote(do_block)),
Helpers.make_else_function(unquote(else_block))
)
end
end
@doc ~S"""
Wait until one of the expressions in the given block evaluates to a truthy value.
Returns the value corresponding with the matching expression, the value of the optional `else`
clause, or a tuple of the form `{:timeout, timeout_milliseconds}`.
The `do` block passed to this macro must be a series of expressions exactly like a built-in
Elixir `cond` expression. Just like a `cond` expression, the embedded expressions will be
evaluated from top to bottom and the first one that is truthy will provide the resulting value of
the expression. The difference with `cond_wait` is that if none of the expressions is initially
truthy it will wait and periodically re-evaluate them until one of them becomes truthy or a
timeout occurs.
An optional `else` clause may also be used to provide the value in case of a timeout. If an
`else` clause is provided and a timeout occurs, then the `else` clause will be evaluated and
the resulting value of the `else` clause becomes the value of the `cond_wait` expression. If no
`else` clause is provided and a timeout occurs, then the value of the `cond_wait` expression is a
tuple of the form `{:timeout, timeout_milliseconds}`.
## Options
See the WaitForIt module documentation for further discussion of these options.
* `:timeout` - the amount of time to wait (in milliseconds) before giving up
* `:frequency` - the polling frequency (in milliseconds) at which to re-evaluate conditions
* `:signal` - disable polling and use a condition variable of the given name instead
* `:pre_wait` - wait for the given number of milliseconds before evaluating conditions for the first time
## Examples
Trigger an alarm when any sensors go beyond a threshold:
def sound_the_alarm do
WaitForIt.cond_wait timeout: 60_000 * 60 * 24 do
read_sensor(:sensor1) > 9 -> IO.puts("Alarm: :sensor1 too high!")
read_sensor(:sensor2) < 100 -> IO.puts("Alarm: :sensor2 too low!")
read_sensor(:sensor3) < 0 -> IO.puts("Alarm: :sensor3 below zero!")
else
IO.puts("All is good...for now.")
end
sound_the_alarm()
end
"""
defmacro cond_wait(opts \\ [], blocks) do
frequency = Keyword.get(opts, :frequency, 100)
timeout = Keyword.get(opts, :timeout, 5_000)
condition_var = Keyword.get(opts, :signal)
do_block = Keyword.get(blocks, :do)
else_block = Keyword.get(blocks, :else)
pre_wait = Keyword.get(opts, :pre_wait, 0)
quote do
require WaitForIt.Helpers
Helpers.pre_wait(unquote(pre_wait))
Helpers.cond_wait(
unquote(frequency),
unquote(timeout),
Helpers.localized_name(unquote(condition_var)),
Helpers.make_cond_function(unquote(do_block)),
Helpers.make_function(unquote(else_block))
)
end
end
@doc ~S"""
Send a signal to the given condition variable to indicate that any processes waiting on the
condition variable should re-evaluate their wait conditions.
The caller of `signal` must be in the same Elixir module as any waiters on the same condition
variable since the module is used as a namespace for condition variables. This is to prevent
accidental name collisions as well as to enforce good practice for encapsulation.
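## Examples
Signal from a producer (a sketch; pairs with a `wait ..., signal: :wait_for_buffer`
invocation in the same module, as in the module documentation above):
    WaitForIt.signal(:wait_for_buffer)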
"""
defmacro signal(condition_var) do
quote do
require WaitForIt.Helpers
Helpers.condition_var_signal(Helpers.localized_name(unquote(condition_var)))
end
end
end
|
lib/wait_for_it.ex
| 0.901265
| 0.966537
|
wait_for_it.ex
|
starcoder
|
defmodule Thunking do
@moduledoc false
  # The delay state of a thunk process: holds the suspended computation
  # and a list of {pid, ref} pairs to send the value to once forced.
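  #
  # A minimal sketch of driving a delay thunk by hand (assumes the caller spawns
  # the process itself; any public wrapper API is not shown in this module):
  #
  #     pid = spawn(Thunking, :thunking, [:delay, fn -> 1 + 1 end, []])
  #     ref = make_ref()
  #     send(pid, {:force, self(), ref})
  #     receive do
  #       {:done, ^ref, value} -> value  #=> 2
  #     end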
def thunking(:delay, fun, ps) do
receive do
      # Upon receiving a connect message, recurses with the new
      # process cons'd onto its process list.
{:connect, p} ->
thunking(:delay, fun, [p | ps])
      # Receiving :force with a pid and ref: evaluates the suspended
      # function, sends the value back to the pid that requested it,
      # and then distributes the value to the connected processes.
{:force, pid, ref} ->
y = fun.()
send(pid, {:done, ref, y})
distr(y, ps)
      # Similar to the clause above, but with no requester to reply to.
:force ->
y = fun.()
distr(y, ps)
end
end
  # Similar to the clause above, except it holds the pid and ref of the
  # source thunk it expects a value from.
def thunking(:map, source, ref, fun, ps) do
receive do
{:connect, p} ->
thunking(:map, source, ref, fun, [p | ps])
{:force, pid, ref1} ->
send(source, :force)
receive do
{:done, ^ref, x} ->
y = fun.(x)
send(pid, {:done, ref1, y})
distr(y, ps)
end
:force ->
send(source, :force)
receive do
{:done, ^ref, x} ->
y = fun.(x)
distr(y, ps)
end
{:done, ^ref, x} ->
thunking(:delay, fn -> fun.(x) end, ps)
end
end
# Combines two thunks.
def thunking(:product, p1, r1, p2, r2, ps) do
receive do
{:connect, p} ->
thunking(:product, p1, r1, p2, r2, [p | ps])
{:force, pid, ref} ->
send(p1, :force)
send(p2, :force)
receive do
{:done, ^r1, x} ->
receive do
{:done, ^r2, y} ->
z = {x, y}
send(pid, {:done, ref, z})
distr(z, ps)
end
{:done, ^r2, y} ->
receive do
{:done, ^r1, x} ->
z = {x, y}
send(pid, {:done, ref, z})
distr(z, ps)
end
end
:force ->
send(p1, :force)
send(p2, :force)
receive do
{:done, ^r1, x} ->
receive do
{:done, ^r2, y} ->
z = {x, y}
distr(z, ps)
end
{:done, ^r2, y} ->
receive do
{:done, ^r1, x} ->
z = {x, y}
distr(z, ps)
end
end
{:done, ^r1, x} ->
thunking(:product1, x, p2, r2, ps)
{:done, ^r2, y} ->
thunking(:product2, p1, r1, y, ps)
end
end
def thunking(:product1, x, p2, r2, ps) do
receive do
{:connect, p} ->
thunking(:product1, x, p2, r2, [p | ps])
{:force, pid, ref} ->
send(p2, :force)
receive do
{:done, ^r2, y} ->
z = {x, y}
send(pid, {:done, ref, z})
distr(z, ps)
end
:force ->
send(p2, :force)
receive do
{:done, ^r2, y} ->
z = {x, y}
distr(z, ps)
end
{:done, ^r2, y} ->
thunking(:done, {x, y}, ps)
end
end
def thunking(:product2, p1, r1, y, ps) do
receive do
{:connect, p} ->
thunking(:product2, p1, r1, y, [p | ps])
{:force, pid, ref} ->
send(p1, :force)
receive do
{:done, ^r1, x} ->
z = {x, y}
send(pid, {:done, ref, z})
distr(z, ps)
end
:force ->
send(p1, :force)
receive do
{:done, ^r1, x} ->
z = {x, y}
distr(z, ps)
end
{:done, ^r1, x} ->
thunking(:done, {x, y}, ps)
end
end
def thunking(:done, x, ps) do
receive do
{:connect, p} ->
thunking(:done, x, [p | ps])
{:force, pid, ref} ->
send(pid, {:done, ref, x})
distr(x, ps)
:force ->
distr(x, ps)
end
end
# Helper function to distribute values amongst processes.
defp distr(_, []), do: exit(:normal)
defp distr(x, [{pid, ref} | ps]) do
send(pid, {:done, ref, x})
distr(x, ps)
end
end
|
lib/thunking.ex
| 0.709321
| 0.5564
|
thunking.ex
|
starcoder
|
defmodule Advent2019Web.Day18Controller do
use Advent2019Web, :controller
@doc """
From an ASCII labyrinth produce a sparse representation as a map.
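Cells containing `"."` are omitted, e.g.:

    labyrinth_string_to_map("#@.")
    #=> %{{0, 0} => "#", {0, 1} => "@"}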
"""
@spec labyrinth_string_to_map(String.t()) :: map
def labyrinth_string_to_map(labyrinth) do
String.split(labyrinth, "\n")
|> Enum.with_index()
|> Enum.flat_map(fn {line, line_idx} ->
line
|> String.graphemes()
|> Enum.with_index()
|> Enum.map(fn {char, column_idx} ->
if char != "." do
{{line_idx, column_idx}, char}
end
end)
end)
|> Enum.reject(&(&1 == nil))
|> Map.new()
end
@doc """
Given a map representation of a labyrinth, finds a given element in it.
"""
@spec element_position(map, String.t()) :: {Integer, Integer}
def element_position(labyrinth, element) do
[{coord, _}] =
labyrinth
|> Stream.filter(fn {_, val} -> val == element end)
|> Stream.take(1)
|> Enum.to_list()
coord
end
defp is_key?(k), do: k >= "a" and k <= "z"
@doc """
Given a labyrinth, a starting position and the set of keys already taken,
find what can be reached from that position and at which distance.
If a key is already equipped, it is ignored. If a door can be opened with a key
within the equipped ones, the path search crosses the door.
"""
@spec next_possible_moves(map, {Integer, Integer}, MapSet.t(String.t())) ::
MapSet.t({String.t(), Integer})
def next_possible_moves(labyrinth, start_cell, equipped_keys) do
distances_from(
labyrinth,
MapSet.new([start_cell]),
MapSet.new(),
0,
equipped_keys,
MapSet.new()
)
end
@spec distances_from(
map,
MapSet.t({Integer, Integer}),
MapSet.t({Integer, Integer}),
non_neg_integer,
MapSet.t(String.t()),
MapSet.t({Integer, Integer})
) ::
MapSet.t({String.t(), Integer})
defp distances_from(
labyrinth,
edge_cells,
explored_cells,
distance,
equipped_keys,
found_so_far
) do
# check which goodies are on the edge
new_found =
edge_cells
|> Enum.map(&Map.get(labyrinth, &1))
# ignore already equipped keys
|> Enum.reject(fn k -> MapSet.member?(equipped_keys, k) end)
# ignore non-keys
|> Enum.filter(&is_key?(&1))
|> Enum.map(&{&1, distance})
found_so_far = MapSet.union(found_so_far, MapSet.new(new_found))
# now find the new edge to explore
new_edge_cells =
edge_cells
# the function returns the distances to the keys that can be reached DIRECTLY.
# it cannot step on a key to reach another one, unless it's already taken.
|> Enum.reject(fn k ->
content = Map.get(labyrinth, k)
is_key?(content) and not MapSet.member?(equipped_keys, content)
end)
|> Enum.map(&reachable_cells(labyrinth, &1, equipped_keys))
|> Enum.reduce(MapSet.new(), fn s, tot -> MapSet.union(s, tot) end)
|> MapSet.difference(explored_cells)
if MapSet.size(new_edge_cells) == 0 do
found_so_far
else
distances_from(
labyrinth,
new_edge_cells,
MapSet.union(explored_cells, edge_cells),
distance + 1,
equipped_keys,
found_so_far
)
end
end
@doc """
Check whether a coordinate can be crossed, considering both walls and the
available keys.
"""
  def can_cross?(labyrinth, cell_to_check, equipped_keys) do
    case Map.get(labyrinth, cell_to_check, nil) do
nil ->
true
"@" ->
true
"#" ->
false
x when x >= "a" and x <= "z" ->
true
x ->
MapSet.member?(equipped_keys, String.downcase(x))
end
end
@doc """
Given a cell coordinate, return the cells directly reachable from it, taking
into account the walls of the labyrinth and the equipped keys.
"""
def reachable_cells(labyrinth, {col, row}, equipped_keys) do
[
{col, row + 1},
{col + 1, row},
{col - 1, row},
{col, row - 1}
]
|> Enum.filter(&can_cross?(labyrinth, &1, equipped_keys))
|> MapSet.new()
end
@doc """
  Finds the sequence of keys to collect from a labyrinth that minimizes the
  total number of steps, returning that minimal step count and the
  corresponding key sequence.
"""
  @spec best_key_sequence(map, {integer, integer}, MapSet.t(String.t()), integer | :infinity, integer) ::
          {integer | nil, [String.t()] | nil}
  def best_key_sequence(
        labyrinth,
        start_pos,
        equipped_keys \\ MapSet.new(),
        # :infinity (an atom) compares greater than any number in Erlang term ordering
        distance_limit \\ :infinity,
distance_so_far \\ 0
) do
# all the possible next keys to retrieve
candidates =
next_possible_moves(labyrinth, start_pos, equipped_keys)
|> Enum.sort_by(fn {_, distance} -> distance end)
# only the ones that are not so distant that we already found a better path for sure
meaningful_candidates =
Enum.reject(candidates, fn {_, distance} -> distance + distance_so_far > distance_limit end)
IO.inspect(
{"keys:", equipped_keys, "distance so far:", distance_so_far, "limit:", distance_limit}
)
    # nothing left to retrieve? the path is complete, terminate it
    if candidates == [] do
      {0, []}
    else
      # there was something to retrieve, but it is too distant, so the search stops
      if meaningful_candidates == [] do
        {nil, nil}
      else
        # something to retrieve and not too distant, explore each possibility
        Enum.reduce(meaningful_candidates, {:infinity, ~w(fake sequence)}, fn {key, distance},
                                                                              {best_distance_so_far,
                                                                               best_sequence_so_far} ->
{extra_distance, next_keys} =
best_key_sequence(
labyrinth,
element_position(labyrinth, key),
MapSet.put(equipped_keys, key),
best_distance_so_far,
distance_so_far + distance
)
# extra_distance can be nil if the search was interrupted
if extra_distance != nil and distance + extra_distance < best_distance_so_far do
{distance + extra_distance, [key | next_keys]}
else
{best_distance_so_far, best_sequence_so_far}
end
end)
end
end
end
def solve1(conn, params) do
labyrinth_str = params["labyrinth"]
labyrinth = labyrinth_string_to_map(labyrinth_str)
{distance, keys} = best_key_sequence(labyrinth, element_position(labyrinth, "@"))
json(conn, %{
result: distance,
keys: keys
})
end
end
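# A minimal usage sketch (hypothetical input, not part of the original file;
# the labyrinth below is the small Advent of Code day 18 example, where door
# "A" needs key "a"):
#
#     labyrinth = """
#     #########
#     #b.A.@.a#
#     #########
#     """
#     map = Advent2019Web.Day18Controller.labyrinth_string_to_map(labyrinth)
#     start = Advent2019Web.Day18Controller.element_position(map, "@")
#     Advent2019Web.Day18Controller.best_key_sequence(map, start)
#     # expected: {8, ["a", "b"]} -- 8 steps, collecting "a" then "b"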
# ---- lib/advent2019_web/controllers/day18_controller.ex ----
defmodule Plymio.Vekil.Forom.Proxy do
@moduledoc ~S"""
The module implements the `Plymio.Vekil.Forom` protocol and manages
a *proxy*.
A **proxy** *forom* holds a reference to another *proxy* in its `:forom`
field.
  When a **proxy** *forom* is produced or realised, the *proxy* in the
  `:forom` field is used, together with the *vekil*, in a call to
  `Plymio.Vekil.proxy_fetch/2`, and the *forom* returned by the fetch
  is produced or realised.
  If the **proxy** *forom* does not have a *vekil*, an error result is returned.
In many examples the *proxy* is an atom but that is not a
constraint: its type (e.g. atom, string, whatever) is defined and
decided by the *vekil*. (So, for example, looking up an atom in a
*vekil* where the *proxies* ("keys") are strings will never succeed).
The *vekils* used in the doctests below use atom *proxies*.
See `Plymio.Vekil.Forom` for the definitions of the protocol functions.
See `Plymio.Vekil` for an explanation of the test environment.
## Module State
See `Plymio.Vekil.Forom` for the common fields.
The default `:produce_default` is an empty list.
The default `:realise_default` is *the unset value* (`Plymio.Fontais.the_unset_value/0`).
The module's state is held in a `struct` with the following field(s):
| Field | Aliases | Purpose |
| :--- | :--- | :--- |
| `:forom` | | *holds the proxy* |
"""
require Plymio.Fontais.Guard
require Plymio.Fontais.Option
require Plymio.Fontais.Vekil.ProxyForomDict, as: PROXYFOROMDICT
use Plymio.Fontais.Attribute
use Plymio.Vekil.Attribute
@type t :: %__MODULE__{}
@type opts :: Plymio.Fontais.opts()
@type error :: Plymio.Fontais.error()
@type kv :: Plymio.Fontais.kv()
@type product :: Plymio.Vekil.product()
import Plymio.Fontais.Error,
only: [
new_error_result: 1
],
warn: false
import Plymio.Fontais.Guard,
only: [
is_value_set: 1,
is_value_unset_or_nil: 1
]
import Plymio.Fontais.Option,
only: [
opts_create_aliases_dict: 1,
opts_canonical_keys: 2
]
@plymio_vekil_forom_proxy_kvs_aliases [
# struct
@plymio_vekil_field_alias_vekil,
@plymio_vekil_field_alias_forom,
@plymio_vekil_field_alias_proxy,
@plymio_vekil_field_alias_seen,
@plymio_vekil_field_alias_produce_default,
@plymio_vekil_field_alias_realise_default,
@plymio_fontais_field_alias_protocol_name,
@plymio_fontais_field_alias_protocol_impl,
# virtual
@plymio_vekil_field_alias_realise_default
]
@plymio_vekil_forom_proxy_dict_aliases @plymio_vekil_forom_proxy_kvs_aliases
|> opts_create_aliases_dict
@doc false
def update_canonical_opts(opts, dict \\ @plymio_vekil_forom_proxy_dict_aliases) do
opts |> opts_canonical_keys(dict)
end
@plymio_vekil_defstruct [
{@plymio_vekil_field_vekil, @plymio_fontais_the_unset_value},
{@plymio_vekil_field_forom, @plymio_fontais_the_unset_value},
{@plymio_vekil_field_proxy, @plymio_fontais_the_unset_value},
{@plymio_vekil_field_seen, @plymio_fontais_the_unset_value},
{@plymio_vekil_field_produce_default, []},
{@plymio_vekil_field_realise_default, @plymio_fontais_the_unset_value},
{@plymio_fontais_field_protocol_name, Plymio.Vekil.Forom},
{@plymio_fontais_field_protocol_impl, __MODULE__}
]
defstruct @plymio_vekil_defstruct
@doc_new ~S"""
`new/1` takes an optional *opts* and creates a new *forom* returning `{:ok, forom}`.
## Examples
iex> {:ok, forom} = new()
...> match?(%FOROMPROXY{}, forom)
true
`Plymio.Vekil.Utility.forom?/1` returns `true` if the value implements `Plymio.Vekil.Forom`
iex> {:ok, forom} = new()
...> forom |> Plymio.Vekil.Utility.forom?
true
The *proxy* is passed using the `:forom` key:
iex> {:ok, forom1} = FOROMFORM.new(forom: :x_add_1)
...> {:ok, forom} = new(forom: forom1)
...> forom |> Plymio.Vekil.Utility.forom?
true
"""
@doc_update ~S"""
`update/2` implements `Plymio.Vekil.Forom.update/2`.
## Examples
iex> {:ok, forom} = new(forom: :x_add_1)
...> {:ok, forom} = forom |> FOROMPROT.update(forom: :x_mul_x)
...> realise_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:ok, {form, _}} = forom |> FOROMPROT.realise(realise_opts)
...> form |> harnais_helper_test_forms!(binding: [x: 3])
{9, ["x = x * x"]}
"""
@doc_normalise ~S"""
`normalise/1` creates a new *forom* from its argument unless the argument is already one.
For a **proxy** *forom* `normalise/1` offers a small way to reduce boilerplate.
## Examples
Here the argument is an atom and a *proxy forom* is created. Note a
*vekil* is needed to resolve the *proxy*.
iex> {:ok, %FOROMPROXY{} = forom} = :x_mul_x |> normalise
...> realise_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:ok, {form, _}} = forom |> FOROMPROT.realise(realise_opts)
...> form |> harnais_helper_test_forms!(binding: [x: 3])
{9, ["x = x * x"]}
The function accepts any value as the *proxy*; it is only when the
*proxy* is accessed by a *vekil* (e.g. `Plymio.Vekil.proxy_fetch/2`)
can the type of the proxy be known (e.g. an atom) and invalid ones caught.
iex> {:ok, %FOROMPROXY{} = forom} = "maybe a proxy" |> normalise
...> realise_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:error, error} = forom |> FOROMPROT.realise(realise_opts)
...> error |> Exception.message
"proxy invalid, got: maybe a proxy"
"""
@doc_produce ~S"""
`produce/2` takes a *forom* and an optional *opts*.
  The value in the `:forom` field is used, together with the (essential) *vekil*, in a call to
  `Plymio.Vekil.proxy_fetch/2`, and the *forom* returned by the fetch is produced.
## Examples
iex> {:ok, forom} = new(forom: :x_add_1)
...> produce_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:ok, {product, _}} = forom |> FOROMPROT.produce(produce_opts)
...> product |> Keyword.get_values(:forom)
...> |> harnais_helper_test_forms!(binding: [x: 7])
{8, ["x = x + 1"]}
A variation of the above example showing that the product has
multiple `:forom` keys, one for each of the constituent, terminal
*forom* in the `x_funs` **list** *forom*.
iex> {:ok, forom} = new(forom: :x_funs)
...> produce_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:ok, {product, _}} = forom |> FOROMPROT.produce(produce_opts)
...> [:forom] = product |> Keyword.keys |> Enum.uniq
...> 3 = product |> Keyword.keys |> length
...> product |> Keyword.get_values(:forom)
...> |> harnais_helper_test_forms!(binding: [x: 7])
{63, ["x = x + 1", "x = x * x", "x = x - 1"]}
  A **proxy** *forom* can reference another *proxy* which is itself a
  **proxy** *forom*, and so on:
iex> {:ok, %VEKILFORM{} = vekil} = [dict: [
...> p1: :p2,
...> p2: :p3,
...> p3: "The End"
...> ]] |> VEKILFORM.new()
...> {:ok, forom} = new(forom: :p1)
...> produce_opts = [vekil: vekil]
...> {:ok, {product, _}} = forom |> FOROMPROT.produce(produce_opts)
...> product |> Keyword.get_values(:forom)
["The End"]
There must be a valid *vekil*:
iex> {:ok, forom} = new(forom: :x_add_1)
...> produce_opts = [vekil: :invalid_vekil]
...> {:error, error}= forom |> FOROMPROT.produce(produce_opts)
...> error |> Exception.message
"vekil invalid, got: :invalid_vekil"
iex> {:ok, forom} = new(forom: :not_a_proxy)
...> {:error, error}= forom |> FOROMPROT.produce
...> error |> Exception.message
"vekil is unset"
An empty *forom* does not produce any `:forom` keys:
iex> {:ok, forom} = new()
...> {:ok, {product, _}} = forom |> FOROMPROT.produce
...> product |> Keyword.get_values(:forom)
[]
"""
@doc_realise ~S"""
`realise/2` takes a *forom* and an optional *opts*, calls
`produce/2`, and then gets (`Keyword.get_values/2`) the `:forom` key values.
  The examples are reworked ones from `produce/2`.
## Examples
iex> {:ok, forom} = new(forom: :x_funs)
...> realise_opts = [vekil: vekil_helper_form_vekil_example1()]
...> {:ok, {forms, _}} = forom |> FOROMPROT.realise(realise_opts)
...> 3 = forms |> length
...> forms |> harnais_helper_test_forms!(binding: [x: 7])
{63, ["x = x + 1", "x = x * x", "x = x - 1"]}
iex> {:ok, %VEKILFORM{} = vekil} = [dict: [
...> p1: :p2,
...> p2: :p3,
...> p3: "The End"
...> ]] |> VEKILFORM.new()
...> {:ok, forom} = new(forom: :p1)
...> realise_opts = [vekil: vekil]
...> {:ok, {values, _}} = forom |> FOROMPROT.realise(realise_opts)
...> values
["The End"]
"""
@vekil [
Plymio.Vekil.Codi.Dict.__vekil__(),
# overrides to the defaults
%{
state_def_new_doc: quote(do: @doc(unquote(@doc_new))),
state_def_update_doc: quote(do: @doc(unquote(@doc_update))),
vekil_forom_def_normalise_doc: quote(do: @doc(unquote(@doc_normalise))),
vekil_forom_def_produce_doc: quote(do: @doc(unquote(@doc_produce))),
vekil_forom_def_realise_doc: quote(do: @doc(unquote(@doc_realise)))
}
]
|> PROXYFOROMDICT.create_proxy_forom_dict!()
@vekil_proxies [
:state_base_package,
:state_defp_update_field_header,
:state_vekil_defp_update_field_vekil_passthru,
:state_vekil_proxy_defp_update_field_proxy_passthru,
:state_vekil_forom_defp_update_field_forom_passthru,
:state_vekil_defp_update_field_produce_default_passthru,
:state_vekil_defp_update_field_realise_default_passthru,
:state_vekil_defp_update_field_seen_validate,
:state_defp_update_field_unknown,
:vekil_defp_validate_vekil,
:vekil_forom_proxy_def_produce,
:vekil_forom_proxy_def_realise,
:vekil_forom_proxy_defp_realise_product,
:vekil_forom_def_normalise,
:vekil_forom_proxy_defp_forom_value_normalise
]
@codi_opts [
{@plymio_fontais_key_dict, @vekil}
]
@vekil_proxies
|> PROXYFOROMDICT.reify_proxies(@codi_opts)
@doc false
@since "0.1.0"
def seen_ensure(seen \\ nil)
def seen_ensure(seen) when is_map(seen) do
{:ok, seen}
end
def seen_ensure(seen) when is_value_unset_or_nil(seen) do
{:ok, %{}}
end
def seen_ensure(seen) do
new_error_result(m: "seen invalid", v: seen)
end
@doc false
@since "0.1.0"
def seen_has_proxy?(seen, proxy) when is_map(seen) do
seen |> Map.has_key?(proxy)
end
@doc false
@since "0.1.0"
def seen_put_proxy(seen, proxy, value \\ nil) when is_map(seen) do
{:ok, seen |> Map.put(proxy, value)}
end
defp forom_ensure_seen_init(forom)
defp forom_ensure_seen_init(%__MODULE__{@plymio_vekil_field_seen => seen} = state)
when is_value_unset_or_nil(seen) do
{:ok, state |> struct!([{@plymio_vekil_field_seen, %{}}])}
end
defp forom_ensure_seen_init(%__MODULE__{@plymio_vekil_field_seen => seen} = state)
when is_map(seen) do
{:ok, state}
end
@doc false
@since "0.1.0"
def forom_add_seen_proxy(%__MODULE__{} = state, proxy, value \\ nil) do
with {:ok, %__MODULE__{@plymio_vekil_field_seen => seen} = state} <-
state
|> forom_ensure_seen_init,
{:ok, seen} <- seen |> seen_put_proxy(proxy, value),
{:ok, %__MODULE__{}} = result <- state |> forom_update_seen(seen) do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc false
@since "0.1.0"
def forom_has_seen_proxy?(forom, proxy)
def forom_has_seen_proxy?(%__MODULE__{@plymio_vekil_field_seen => seen}, proxy)
when is_map(seen) do
seen |> seen_has_proxy?(proxy)
end
def forom_has_seen_proxy?(%__MODULE__{}, _proxy) do
false
end
defp forom_update_seen(%__MODULE__{} = state, seen)
when is_map(seen) do
{:ok, state |> struct!([{@plymio_vekil_field_seen, seen}])}
end
end
defimpl Plymio.Vekil.Forom, for: Plymio.Vekil.Forom.Proxy do
@funs :functions
|> @protocol.__info__
|> Keyword.drop([:__protocol__, :impl_for, :impl_for!])
for {fun, arity} <- @funs do
defdelegate unquote(fun)(unquote_splicing(Macro.generate_arguments(arity, nil))), to: @for
end
end
defimpl Inspect, for: Plymio.Vekil.Forom.Proxy do
use Plymio.Vekil.Attribute
import Plymio.Fontais.Guard,
only: [
is_value_unset_or_nil: 1
]
def inspect(
%Plymio.Vekil.Forom.Proxy{
@plymio_vekil_field_vekil => vekil,
@plymio_vekil_field_forom => forom,
@plymio_vekil_field_proxy => proxy
},
_opts
) do
vekil_telltale =
vekil
|> case do
x when is_value_unset_or_nil(x) -> nil
_ -> "+K"
end
forom_telltale =
forom
|> case do
x when is_value_unset_or_nil(x) -> "-F"
x when is_list(x) -> "F=L#{length(x)}"
x when is_atom(x) -> "F=#{to_string(x)}"
_x -> "+F"
end
proxy_telltale =
proxy
|> case do
x when is_value_unset_or_nil(x) -> nil
x when is_atom(x) -> "P=#{to_string(proxy)}"
_x -> "+P"
end
forom_telltale =
[
proxy_telltale,
forom_telltale,
vekil_telltale
]
|> List.flatten()
|> Enum.reject(&is_nil/1)
|> Enum.join("; ")
"FOROMProxy(#{forom_telltale})"
end
end
# ---- lib/vekil/concrete/forom/proxy.ex ----
defmodule ChallengeGov.Reports do
@moduledoc """
Context for creating a report
"""
import Ecto.Query
import Ecto.Changeset
alias ChallengeGov.Repo
alias ChallengeGov.Reports.Report
alias ChallengeGov.SecurityLogs.SecurityLog
# @doc """
# Stream security log records for CSV download
# """
def stream_all_records() do
records =
SecurityLog
|> order_by([r], asc: r.id)
|> Repo.all()
{:ok, records}
end
def filter_by_params(params) do
%{"report" => %{"year" => year, "month" => month, "day" => day}} = params
changeset =
Report.changeset(%Report{}, %{
"year" => sanitize_param(year),
"month" => sanitize_param(month),
"day" => sanitize_param(day)
})
if changeset.valid? do
{datetime_start, datetime_end} =
range_from(
sanitize_param(year),
sanitize_param(month),
sanitize_param(day)
)
records =
SecurityLog
|> where([r], r.logged_at >= ^datetime_start)
|> where([r], r.logged_at <= ^datetime_end)
|> order_by([r], asc: r.id)
|> Repo.all()
{:ok, records}
else
      apply_action(changeset, :update)
end
end
defp sanitize_param(value) do
if value == "", do: nil, else: String.to_integer(value)
end
defp range_from(year, month, day) do
case {year, month, day} do
{year, month, day} when month == nil and day == nil ->
# just year given
datetime_start =
year
|> Timex.beginning_of_year()
|> Timex.to_datetime()
datetime_end =
year
|> Timex.end_of_year()
|> Timex.to_datetime()
|> Timex.end_of_day()
|> Timex.to_datetime()
{datetime_start, datetime_end}
{year, month, day} when day == nil ->
# month/year given
datetime_start =
year
|> Timex.beginning_of_month(month)
|> Timex.to_datetime()
|> Timex.beginning_of_day()
datetime_end =
year
|> Timex.end_of_month(month)
|> Timex.to_datetime()
|> Timex.end_of_day()
{datetime_start, datetime_end}
{year, month, day} ->
# day/month/year given
datetime_start =
{year, month, day}
|> Timex.to_datetime()
|> Timex.beginning_of_day()
datetime_end =
{year, month, day}
|> Timex.to_datetime()
|> Timex.end_of_day()
{datetime_start, datetime_end}
end
end
end
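# A minimal usage sketch (hypothetical values; assumes a configured Repo and
# Timex). Blank month/day strings widen the range to the whole month or year:
#
#     params = %{"report" => %{"year" => "2020", "month" => "6", "day" => ""}}
#     {:ok, records} = ChallengeGov.Reports.filter_by_params(params)
#     # records: all SecurityLog rows logged during June 2020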
# ---- lib/challenge_gov/reports.ex ----
defmodule Ueberauth.Strategy.Weebly do
@moduledoc """
Provides an Ueberauth strategy for authenticating with Weebly.
### Setup
Create an application in Weebly for you to use.
Register a new application at the [weebly developers page](https://dev.weebly.com/) and get the `client_id` and `client_secret`.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
weebly: { Ueberauth.Strategy.Weebly, [] }
]
Then include the configuration for weebly.
config :ueberauth, Ueberauth.Strategy.Weebly.OAuth,
client_id: System.get_env("WEEBLY_API_KEY"),
client_secret: System.get_env("WEEBLY_SECRET")
If you haven't already, create a pipeline and setup routes for your callback handler
pipeline :auth do
Ueberauth.plug "/auth"
end
scope "/auth" do
pipe_through [:browser, :auth]
get "/:provider/callback", AuthController, :callback
end
Create an endpoint for the callback where you will handle the `Ueberauth.Auth` struct
defmodule MyApp.AuthController do
use MyApp.Web, :controller
        def callback(%{ assigns: %{ ueberauth_failure: fails } } = conn, _params) do
# do things with the failure
end
        def callback(%{ assigns: %{ ueberauth_auth: auth } } = conn, params) do
# do things with the auth
end
end
You can edit the behaviour of the Strategy by including some options when you register your provider.
To set the default 'scopes' (permissions):
config :ueberauth, Ueberauth,
providers: [
weebly: { Ueberauth.Strategy.Weebly, [default_scope: "read:site,read:store-catalog"] }
]
Default is "read:site,write:site"
"""
use Ueberauth.Strategy,
uid_scope: :token,
uid_field: :site_id,
default_scope: "read:site,write:site",
oauth2_module: Ueberauth.Strategy.Weebly.OAuth
alias Ueberauth.Auth.{Credentials, Info}
@doc """
Handles the initial redirect to the Weebly authentication page.
"""
def handle_request!(%Plug.Conn{} = conn) do
opts = get_options(conn)
redirect!(conn, Ueberauth.Strategy.Weebly.OAuth.authorize_url!(opts))
end
@doc """
Handles the callback from Weebly. When there is a failure from Weebly the failure is included in the
`ueberauth_failure` struct. Otherwise the information returned from Weebly is returned in the `Ueberauth.Auth` struct.
"""
def handle_callback!(%Plug.Conn{params: %{"authorization_code" => _} = params} = conn) do
module = option(conn, :oauth2_module)
params = [
code: params["authorization_code"],
site_id: params["site_id"],
user_id: params["user_id"]
]
token = apply(module, :get_token!, [params]).token
if token.access_token == nil do
err = token.other_params["error"]
desc = token.other_params["error_description"]
set_errors!(conn, [error(err, desc)])
else
conn
|> put_private(:weebly_token, token)
|> get_user()
end
end
@doc false
def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No authorization code received")])
end
@doc """
Cleans up the private area of the connection used for passing the raw Weebly response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:weebly_token, nil)
|> put_private(:weebly_user, nil)
end
@doc """
Fetches the uid field from the Weebly response.
"""
def uid(conn) do
uid_field = conn |> option(:uid_field) |> to_string()
case option(conn, :uid_scope) do
:user -> conn.private.weebly_user[uid_field]
:token -> conn.private.weebly_token.other_params[uid_field]
end
end
@doc """
Includes the info for the Weebly user.
"""
def info(conn) do
%Info{
email: conn.private.weebly_user["email"],
name: conn.private.weebly_user["name"]
}
end
@doc """
Includes the credentials from the Weebly response.
"""
def credentials(conn) do
token = conn.private.weebly_token
%Credentials{token: token.access_token, token_type: token.token_type}
end
defp get_user(conn) do
access_token = conn.private.weebly_token.access_token
profile_url = "https://api.weebly.com/v1/user"
headers = [{"x-weebly-access-token", access_token}]
case Ueberauth.Strategy.Weebly.OAuth.get(profile_url, headers) do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
put_private(conn, :weebly_user, user)
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
defp get_options(conn) do
[
scope: conn.params["scope"] || option(conn, :default_scope),
redirect_uri: callback_url(conn),
site_id: conn.params["site_id"],
user_id: conn.params["user_id"],
version: conn.params["version"]
]
|> Enum.reject(fn {_, v} -> is_nil(v) end)
end
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
end
# ---- lib/strategy/weebly.ex ----
defmodule Griffin.Model.Module do
@moduledoc """
Module for interacting with Griffin models. Griffin models are elixir
modules that expect certain functions to be defined for the sake of data
validation, database persistence, and GraphQL integration.
"""
@doc """
Gets the singular and plural namespace of a model module.
"""
def namespaces(model) do
if is_tuple(model.namespace) do
model.namespace
else
plural =
model.namespace
|> to_string
|> Inflex.pluralize()
|> String.to_atom()
{model.namespace, plural}
end
end
@doc """
Accepts a model module and passes a `ctx` map through its `resolve`
function. This `resolve` function is expected to return `ctx` with
a Poison encodable List or Map in `ctx.res` and/or an errors List in
`ctx.errors`.
```
defmodule MyModel do
def resolve(ctx) do
# Pipe `ctx` through some middleware and return:
      %{
        res:
          case ctx.op do
            :create -> %{create: "json"}
            :read -> %{read: "json"}
            :update -> %{update: "json"}
            :delete -> %{delete: "json"}
            :list -> [%{list: "json"}]
          end
      }
end
end
```
`ctx` is a map that contains the information of the operation. This map is
  a heterogeneous blob of relevant data depending on the operation and context of
the operation. For instance it can minimally contain `ctx.args` which are the
arguments to the CRUDL operation and an empty `ctx.res` map and `ctx.errors`
list expected to be filled in. In the case of an HTTP GraphQL request it may
contain headers, a logged in user, or a number of other things the user can
  attach to the context by augmenting the map as it pipes through `resolve`.
This allows a lot of flexibility when composing behavior and passing through
dependencies. For example, creating a user model may involve validating,
saving data to the database, sending a confirmation email, and confirming
that email upon a subsequent update. That might be expressed like so...
```
defmodule User do
    def fields, do: [
email: [:email,
on_create: :required],
email_confirmation_hash: [:string, :uuid,
on_create_read_delete_list: :forbidden]
]
def resolve(ctx) do
ctx
|> validate(&fields/0)
|> confirm_email
|> to_db_statement
|> send_confirmation_email
end
end
```
One can build on top of this simple foundation for more intricate design
patterns using various native Elixir techniques like pattern matching, macros,
  better function composition, etc. For instance, one could achieve something
  closer to Active Record-style callbacks like so...
```
defmodule User do
    def fields, do: ...
    def after_validation(ctx) when ctx.op in [:create, :update] do
      ctx
      |> set_location
    end
    def after_validation(ctx), do: ctx
    def before_validation(ctx) when ctx.op == :create do
      ctx
      |> normalize_name
    end
    def before_validation(ctx), do: ctx
def resolve(ctx) do
ctx
|> before_validation
|> validate(fields)
|> after_validation
|> before_save
|> to_db_statement
|> after_save
end
end
```
Each middleware function here must return `ctx` to be piped through to the
next eventually returning a `ctx` map to be used for a response.
"""
def resolve(model, crud_op, args) do
model.resolve(%{
_model: model,
args: args,
res: %{},
op: crud_op,
errs: []
})
end
end
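# A minimal usage sketch (MyModel is hypothetical; it must define resolve/1 as
# described in the moduledoc above):
#
#     ctx = Griffin.Model.Module.resolve(MyModel, :create, %{name: "Arya"})
#     ctx.res   # Poison-encodable map or list produced by the model
#     ctx.errs  # list of errors accumulated while resolving, if any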
# ---- lib/griffin/model/module.ex ----
defmodule Regulator.Limit.Gradient do
@moduledoc """
Limiter based on the gradient of change. Based on Netflix's gradient2 algorithm.
https://github.com/Netflix/concurrency-limits.
## Options
* `:initial_limit` - The initial limit when the regulator is installed (defaults to 20).
* `:min_limit` - The minimum limit for the regulator (defaults to 5).
* `:max_limit` - The maximum limit for the regulator (defaults to 200).
* `:smoothing` - Determines how aggressively the concurrency limit can shrink
if there is queueing. Numbers should be between 0.0 and 1.0. Higher numbers
will cause the limit to shrink faster.
* `:rtt_tolerance` - Specifies how much change in average round trip time will
be allowed before reducing the concurrency limit. A value of 2.0 would mean
that a 2x increase in rtt would be acceptable. Default 1.5.
  * `:long_window_count` - Defines the number of sample windows that will be
    considered in the long term moving average. Setting this value lower will
    cause the long term window to adjust more aggressively. Default 600.
"""
@behaviour Regulator.Limit
alias Regulator.Limit.ExpAvg
alias Regulator.Window
defstruct [
min_limit: 5,
max_limit: 200,
smoothing: 0.2,
rtt_tolerance: 1.5,
    # The long RTT is an exponential moving average. We use 600 samples here,
    # which roughly works out to 10 minutes of sample time.
long_rtt: ExpAvg.new(600, 10),
estimated_limit: 5,
last_rtt: 0,
]
@impl true
def new(opts) do
opts = if opts[:initial_limit] do
put_in(opts, [:estimated_limit], opts[:initial_limit])
else
opts
end
struct(__MODULE__, opts)
end
@impl true
def initial(config) do
config.estimated_limit
end
@impl true
def update(gradient, _current_limit, window) do
queue_size = 2 # This should be determined dynamically
case Window.avg_rtt(window) do
0 ->
{gradient, gradient.estimated_limit}
short_rtt ->
long_rtt = update_long_rtt(gradient.long_rtt, short_rtt)
gradient = %{gradient | long_rtt: long_rtt}
# If we don't have enough inflight requests we don't really need to grow the limit
# So just bail out.
if window.max_inflight < gradient.estimated_limit / 2 do
{gradient, gradient.estimated_limit}
else
grad = max(0.5, min(1.0, gradient.rtt_tolerance * long_rtt.value / short_rtt))
new_limit = gradient.estimated_limit * grad + queue_size
# Calculate the EMA of the estimated limit
new_limit = gradient.estimated_limit * (1 - gradient.smoothing) + new_limit * gradient.smoothing
# Clamp the limit values based on the users configuration
new_limit = max(gradient.min_limit, min(gradient.max_limit, new_limit))
gradient = %{gradient | estimated_limit: new_limit}
{gradient, trunc(new_limit)}
end
end
end
defp update_long_rtt(long_rtt, rtt) do
long_rtt = ExpAvg.add(long_rtt, rtt)
# If the long RTT is substantially larger than the short rtt then reduce the
# long RTT measurement. This can happen when latency returns to normal after
    # an excessive load. Reducing the long RTT without waiting for the exponential
    # smoothing helps bring the system back to steady state.
if long_rtt.value / rtt > 2 do
ExpAvg.update(long_rtt, fn current -> current * 0.95 end)
else
long_rtt
end
end
end
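# A minimal usage sketch (hypothetical numbers; in practice Regulator drives
# update/3 itself with a real Regulator.Window for each sample window):
#
#     gradient = Regulator.Limit.Gradient.new(initial_limit: 10, max_limit: 100)
#     10 = Regulator.Limit.Gradient.initial(gradient)
#     # then, per sample window:
#     # {gradient, new_limit} = Regulator.Limit.Gradient.update(gradient, limit, window)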
# ---- lib/regulator/limit/gradient.ex ----
defmodule AlchemyVM do
use GenServer
alias AlchemyVM.Decoder
alias AlchemyVM.ModuleInstance
alias AlchemyVM.Store
alias AlchemyVM.Executor
alias AlchemyVM.Helpers
require IEx
@enforce_keys [:modules, :store]
defstruct [:modules, :store]
@moduledoc """
Execute WebAssembly code
"""
@doc """
Starts the Virtual Machine and returns the PID which is used to
interface with the VM.
"""
@spec start :: {:ok, pid}
def start, do: GenServer.start_link(__MODULE__, [])
@doc false
def init(_args), do: {:ok, %AlchemyVM{modules: %{}, store: %Store{}}}
@doc """
Load a binary WebAssembly file (.wasm) as a module into the VM
"""
@spec load_file(pid, String.t(), map) :: {:ok, AlchemyVM.Module}
def load_file(ref, filename, imports \\ %{}) do
GenServer.call(ref, {:load_module, Decoder.decode_file(filename), imports}, :infinity)
end
@doc """
Load a WebAssembly module directly from a binary into the VM
"""
@spec load(pid, binary, map) :: {:ok, AlchemyVM.Module}
def load(ref, binary, imports \\ %{}) when is_binary(binary) do
GenServer.call(ref, {:load_module, Decoder.decode(binary), imports}, :infinity)
end
@doc """
Load a module that was already decoded by load/3 or load_file/3. This is useful
for caching modules, as it skips the entire decoding step.
"""
@spec load_module(pid, AlchemyVM.Module, map) :: {:ok, AlchemyVM.Module}
def load_module(ref, module, imports \\ %{}) do
GenServer.call(ref, {:load_module, module, imports}, :infinity)
end
@doc """
Call an exported function by name from the VM. The function must have
been loaded in through a module using load_file/2 or load/2 previously
## Usage
### Most basic usage for a simple module (no imports or host functions):
#### Wasm File (add.wat)
```
(module
(func (export "basic_add") (param i32 i32) (result i32)
get_local 0
get_local 1
i32.add
)
)
```
Use an external tool to compile add.wat to add.wasm (compile from text
representation to binary representation)
{:ok, pid} = AlchemyVM.start() # Start the VM
AlchemyVM.load_file(pid, "path/to/add.wasm") # Load the module that contains our add function
# Call the add function, passing in 3 and 10 as args
{:ok, gas, result} = AlchemyVM.execute(pid, "basic_add", [3, 10])
### Executing modules with host functions:
#### Wasm file (log.wat)
```
(module
(import "env" "consoleLog" (func $consoleLog (param f32)))
(export "getSqrt" (func $getSqrt))
(func $getSqrt (param f32) (result f32)
get_local 0
f32.sqrt
tee_local 0
call $consoleLog
get_local 0
)
)
```
Use an external tool to compile log.wat to log.wasm (compile from text
representation to binary representation)
{:ok, pid} = AlchemyVM.start() # Start the VM
# Define the imports used in this module. Keys in the import map
# must be strings
imports = %{
"env" => %{
"consoleLog" => fn x -> IO.puts "its \#{x}" end
}
}
# Load the file, passing in the imports
AlchemyVM.load_file(pid, "path/to/log.wasm", imports)
# Call getSqrt with an argument of 25
AlchemyVM.execute(pid, "getSqrt", [25])
Program execution can also be limited by specifying a `:gas_limit` option:
AlchemyVM.execute(pid, "some_func", [], gas_limit: 100)
This will stop execution of the program if the accumulated gas exceeds 100
Program execution can also output to a log file by specifying a `:trace` option:
AlchemyVM.execute(pid, "some_func", [], trace: true)
This will trace all instructions passed, as well as the gas cost accumulated to a log file
"""
@spec execute(pid, String.t(), list, list) :: :ok | {:ok, any} | {:error, any}
def execute(ref, func, args \\ [], opts \\ []) do
opts = Keyword.merge([gas_limit: :infinity], opts)
GenServer.call(ref, {:execute, func, args, opts}, :infinity)
end
@doc """
Retrieve a Virtual Memory set from the VM. Memory must have been exported
from the WebAssembly module in order to be accessible here.
"""
@spec get_memory(pid, String.t()) :: AlchemyVM.Memory
def get_memory(ref, mem_name) do
GenServer.call(ref, {:get_mem, mem_name}, :infinity)
end
@doc """
Write to a module's exported memory directly. Memory must have been exported
from the WebAssembly module in order to be accessible here.
"""
@spec update_memory(pid, String.t(), AlchemyVM.Memory) :: AlchemyVM
def update_memory(ref, mem_name, mem) do
GenServer.call(ref, {:update_mem, mem_name, mem}, :infinity)
end
@doc """
Returns the state for a given VM instance
"""
@spec vm_state(pid) :: AlchemyVM
def vm_state(ref), do: GenServer.call(ref, :vm_state, :infinity)
def handle_call({:load_module, module, imports}, _from, vm) do
module = Map.put(module, :resolved_imports, imports)
{moduleinst, store} = ModuleInstance.instantiate(ModuleInstance.new(), module, vm.store)
modules = Map.put(vm.modules, moduleinst.ref, moduleinst)
vm = Map.merge(vm, %{modules: modules, store: store})
if module.start do
startidx = module.start
%{^startidx => start_addr} = moduleinst.funcaddrs
{:reply, {:ok, module}, vm, {:continue, {:start, start_addr}}}
else
{:reply, {:ok, module}, vm}
end
end
def handle_call({:execute, fname, args, opts}, _from, vm) do
{reply, vm} =
case Helpers.get_export_by_name(vm, fname, :func) do
:not_found -> {{:error, :no_exported_function, fname}, vm}
addr -> execute_func(vm, addr, args, opts[:gas_limit], fname, opts)
end
{:reply, reply, vm}
end
def handle_call({:get_mem, mname}, _from, vm) do
reply =
case Helpers.get_export_by_name(vm, mname, :mem) do
:not_found -> {:error, :no_exported_mem, mname}
addr -> Enum.at(vm.store.mems, addr)
end
{:reply, reply, vm}
end
def handle_call({:update_mem, mname, mem}, _from, vm) do
case Helpers.get_export_by_name(vm, mname, :mem) do
:not_found -> {:reply, {:error, :no_exported_mem, mname}, vm}
addr ->
mems = List.replace_at(vm.store.mems, addr, mem)
store = Map.put(vm.store, :mems, mems)
reply = Map.put(vm, :store, store)
{:reply, reply, reply}
end
end
def handle_call(:vm_state, _from, vm), do: {:reply, vm, vm}
def handle_continue({:start, start_addr}, vm) do
{_, vm} = execute_func(vm, start_addr, [], :infinity, "start", [])
{:noreply, vm}
end
@spec execute_func(AlchemyVM, integer, list, :infinity | integer, String.t(), list) :: tuple
defp execute_func(vm, addr, args, gas_limit, fname, opts) do
stack = Enum.reduce(args, [], & [&1 | &2])
# Conditional for Trace
if opts[:trace], do: create_log_timestamp(fname)
{vm, gas, stack} = Executor.create_frame_and_execute(vm, addr, gas_limit, opts, 0, stack)
case vm do
tuple when is_tuple(tuple) -> tuple
_ -> {{:ok, gas, List.first(stack)}, vm}
end
end
defp create_log_timestamp(fname) do
    "./trace.log"
    |> Path.expand()
|> File.write("\n#{DateTime.utc_now()} :: #{fname} ================================\n", [:append])
end
end
# ---- lib/wasp_vm.ex ----
defmodule Secure.Bcrypt do
@moduledoc """
Elixir wrapper for the OpenBSD bcrypt password hashing algorithm.
"""
@spec salt(pos_integer) :: bitstring
@doc """
Generates a salt with a given work factor.
The work factor defaults to 12 if no factor is given.
"""
def salt(factor \\ 12) do
{:ok, salt} = :bcrypt.gen_salt(factor)
List.to_string(salt)
end
@spec hash(binary, pos_integer) :: binary
@doc """
Hashes a given password with a given work factor.
Bcrypt takes care of salting the hashes for you so this does not need to be
done. The higher the work factor, the longer the password will take to be
hashed and checked.
The work factor defaults to 12 if no factor is given.
"""
def hash(password, factor \\ 12) when is_binary(password) do
{:ok, hash} = :bcrypt.hashpw(password, salt(factor))
List.to_string(hash)
end
@spec match(binary, binary) :: boolean
@doc """
Compares a given password to a hash.
Returns `true` if the password matches, `false` otherwise.
The comparison is done in constant time (based on the hash length).
"""
def match(password, hash) when is_binary(password) and is_binary(hash) do
    {:ok, res_hash} = :bcrypt.hashpw(password, String.to_charlist(hash))
Secure.compare(hash, List.to_string(res_hash))
end
@spec change_factor(binary, binary, pos_integer) :: bitstring | {:error, binary}
@doc """
Changes the work factor of a hash.
If a given password matches a given hash, the password is re-hashed again
using the new work_factor.
"""
def change_factor(password, hash, factor) when is_binary(password) and is_binary(hash) do
change_password(password, hash, password, factor)
end
@spec change_password(binary, binary, binary, pos_integer) :: bitstring | {:error, binary}
@doc """
Change a password, only if the previous one was given with it.
If a given old password matches a given old hash, a new password
is hashed using the work factor passed in as an argument. (Defaults to 12)
"""
def change_password(old, hash, new, factor \\ 12) when is_binary(old) and is_binary(hash) and is_binary(new) do
if match(old, hash) do
hash(new, factor)
else
{:error, "Bad Password"}
end
end
end
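# A minimal usage sketch (assumes the :bcrypt application is started):
#
#     hash = Secure.Bcrypt.hash("hunter2")
#     true = Secure.Bcrypt.match("hunter2", hash)
#     false = Secure.Bcrypt.match("wrong guess", hash)
#     # re-hash the same password with a higher work factor:
#     stronger = Secure.Bcrypt.change_factor("hunter2", hash, 14)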
# ---- lib/secure/bcrypt.ex ----
defmodule Site.FlokiHelpers do
@moduledoc """
Helpers for working with Floki and the parsed HTML it returns.
"""
@typep tree_or_binary :: Floki.html_tree() | binary
@typep visitor :: (tree_or_binary -> tree_or_binary | nil)
@doc """
traverse/2 is the main way of manipulating the parse tree. It recursively
traverses the tree, passing each node to the provided visit_fn visitor
function.
If the visitor function returns nil, traverse continues to descend through
the tree. If the function returns a Floki.html_tree or string, traverse
replaces the node with that result and stops recursively descending down
that branch.
The visit_fn must handle a (non-list) Floki.html_tree node and a binary string.
"""
@spec traverse(tree_or_binary, visitor) :: tree_or_binary
def traverse(str, visit_fn) when is_binary(str) do
visit_fn.(str) || str
end
def traverse(html_list, visit_fn) when is_list(html_list) do
Enum.map(html_list, fn html -> traverse(html, visit_fn) end)
end
def traverse({element, attrs, children} = html, visit_fn) do
visit_fn.(html) || {element, attrs, traverse(children, visit_fn)}
end
@spec add_class(Floki.html_tree(), iodata) :: Floki.html_tree()
def add_class(html_element, []), do: html_element
def add_class({name, attrs, children}, new_class) do
attrs =
case Enum.split_with(attrs, &match?({"class", _}, &1)) do
{[], others} ->
[{"class", new_class} | others]
{[{"class", existing_class}], others} ->
[{"class", [existing_class, " ", new_class]} | others]
end
{name, attrs, children}
end
@spec remove_class(Floki.html_tree(), iodata) :: Floki.html_tree()
def remove_class({name, attrs, children}, old_class) do
attrs =
case Enum.split_with(attrs, &match?({"class", _}, &1)) do
{[], others} ->
others
{[{"class", existing_class}], others} ->
clean_class =
existing_class
|> IO.iodata_to_binary()
|> String.split()
|> Enum.reject(&(&1 == old_class))
[{"class", Enum.join(clean_class, " ")} | others]
end
{name, attrs, children}
end
@spec remove_style_attrs(Floki.html_tree()) :: Floki.html_tree()
def remove_style_attrs({name, attrs, children}) do
{name, Enum.reject(attrs, &remove_attr?(&1, name)), children}
end
@spec remove_attr?({String.t(), String.t()}, String.t()) :: boolean
defp remove_attr?({"height", _}, _), do: true
defp remove_attr?({"width", _}, _), do: true
defp remove_attr?({"style", _}, "iframe"), do: true
defp remove_attr?(_, _), do: false
end
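# A minimal usage sketch (hypothetical tree): replace every <span> node with a
# <strong> node; returning nil for other nodes lets traverse/2 keep descending:
#
#     tree = {"div", [], [{"span", [], ["hi"]}, "plain text"]}
#     Site.FlokiHelpers.traverse(tree, fn
#       {"span", attrs, children} -> {"strong", attrs, children}
#       _ -> nil
#     end)
#     # => {"div", [], [{"strong", [], ["hi"]}, "plain text"]}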
# ---- apps/site/lib/site/floki_helpers.ex ----
defmodule RedisHash do
@moduledoc """
Offers read and write access to Redis' hash functions,
without caching. All functions directly result in calls to Redis.
Writing data will always overwrite existing values.
"""
require Logger
alias RedisHash
defstruct(
__redis_key__: nil,
__redis_adapter__: nil,
__binary_mode__: true)
@doc """
  Creates a handle for the Redis hash stored under `redis_key`. Nothing is
  read from Redis until `pull/1` is called.
Options:
* `:binary_mode` - true/false depending on whether data should be put through
`:erlang.term_to_binary` and `:erlang.binary_to_term` respectively or not (default: true)
"""
def new(redis_key), do: new(redis_key, [])
def new(redis_key, opts) when is_atom(redis_key), do: new(redis_key |> to_string, opts)
def new(redis_key, opts) when is_binary(redis_key) do
    binary_mode = Keyword.get(opts, :binary_mode, true)
adapter = Application.get_env(:ex_sider, :redis_adapter)
%RedisHash{
__redis_key__: redis_key,
__redis_adapter__: adapter,
__binary_mode__: binary_mode}
end
@doc "Delete this hash from the Redis repo."
def delete(%RedisHash{__redis_key__: key, __redis_adapter__: adapter} = container) do
case adapter.command(["DEL", key]) do
{:ok, x} when is_number(x) -> container
other ->
Logger.error "RedisHash failed to call delete/1, got Redis reply: #{inspect other}"
container
end
end
@doc "Pulls all fields of this hash from Redis. Returns nil or a map."
def pull(%RedisHash{__redis_key__: key, __redis_adapter__: adapter, __binary_mode__: binary}) do
case adapter.command(["HGETALL", key]) do
{:ok, nil} -> nil
{:ok, []} -> %{}
{:ok, fields} when is_list(fields) -> extract_map(fields, %{}, binary)
other ->
Logger.error("RedisHash failed to call pull/1, got Redis reply: #{inspect other}")
nil
end
end
@doc """
Push all local keys/values back to the Redis repo.
This simply overwrites whatever is already in there.
Returns ok or error and the reason.
"""
def push(%RedisHash{__redis_key__: key, __redis_adapter__: adapter, __binary_mode__: binary}, %{} = data) do
data = ensure_keys_are_string(data)
fields = data |> Enum.reduce([], fn
{key, value}, acc when binary -> [key, :erlang.term_to_binary(value) | acc]
{key, value}, acc -> [key, value | acc]
end)
case adapter.command(["HMSET", key | fields]) do
{:ok, "OK"} -> :ok
other ->
Logger.error("RedisHash failed to call push/1, got Redis reply: #{inspect other}")
{:error, :redis_hmset_failed}
end
end
defp extract_map([], acc, _binary_mode), do: acc
defp extract_map([key, value | fields], acc, true), do: extract_map(fields, acc |> Map.put(key, :erlang.binary_to_term(value)), true)
defp extract_map([key, value | fields], acc, false), do: extract_map(fields, acc |> Map.put(key, value), false)
defp extract_map(_, acc, _binary_mode) do
Logger.error("RedisHash failed to extract key/values from the listing given by redis.")
acc
end
defp ensure_keys_are_string(%{} = map) do
map
|> Enum.map(
fn {key, val} when is_atom(key) -> {key |> to_string, val}
{key, val} when is_binary(key) -> {key, val}
{key, _val} ->
raise "For maps to work with Redis, their keys must be strings or atoms, and they will always be cast to string. Got: #{inspect key}"
end)
|> Enum.into(%{})
end
end
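# A minimal usage sketch (assumes :ex_sider is configured with a Redis adapter):
#
#     hash = RedisHash.new("user:42:session")
#     :ok = RedisHash.push(hash, %{last_seen: DateTime.utc_now()})
#     RedisHash.pull(hash)
#     # => %{"last_seen" => %DateTime{...}} -- keys come back as strings,
#     #    values are decoded with :erlang.binary_to_term in binary mode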
# ---- lib/redis_hash.ex ----
defmodule Money.ExchangeRates do
@moduledoc """
Implements a behaviour and functions to retrieve exchange rates
from an exchange rate service.
Configuration for the exchange rate service is defined
in a `Money.ExchangeRates.Config` struct. A default
configuration is returned by `Money.ExchangeRates.default_config/0`.
The default configuration is:
config :ex_money,
exchange_rate_service: false,
exchange_rates_retrieve_every: 300_000,
api_module: Money.ExchangeRates.OpenExchangeRates,
callback_module: Money.ExchangeRates.Callback,
        preload_historic_rates: nil,
        log_failure: :warn,
log_info: :info,
log_success: nil
  These keys are defined as follows:
* `:exchange_rate_service` is a boolean that determines whether to
automatically start the exchange rate retrieval service.
    The default is `false`.
* `:exchange_rates_retrieve_every` defines how often the exchange
rates are retrieved in milliseconds. The default is 5 minutes
(300,000 milliseconds)
* `:api_module` identifies the module that does the retrieval of
exchange rates. This is any module that implements the
`Money.ExchangeRates` behaviour. The default is
`Money.ExchangeRates.OpenExchangeRates`
* `:callback_module` defines a module that follows the
Money.ExchangeRates.Callback behaviour whereby the function
`rates_retrieved/2` is invoked after every successful retrieval
of exchange rates. The default is `Money.ExchangeRates.Callback`.
* `:preload_historic_rates` defines a date or a date range,
that will be requested when the exchange rate service starts up.
The date or date range should be specified as either a `Date.t`
or a `Date.Range.t` or a tuple of `{Date.t, Date.t}` representing
the `from` and `to` dates for the rates to be retrieved. The
default is `nil` meaning no historic rates are preloaded.
* `:log_failure` defines the log level at which api retrieval
errors are logged. The default is `:warn`
* `:log_success` defines the log level at which successful api
retrieval notifications are logged. The default is `nil` which
means no logging.
* `:log_info` defines the log level at which service startup messages
are logged. The default is `:info`.
* `:retriever_options` is available for exchange rate retriever
module developers as a place to add retriever-specific configuration
information. This information should be added in the `init/1`
callback in the retriever module. See `Money.ExchangeRates.OpenExchangeRates.init/1`
for an example.
Keys can also be configured to retrieve values from environment
variables. This lookup is done at runtime to facilitate deployment
strategies. If the value of a configuration key is
`{:system, "some_string"}` then "some_string" is interpreted as
an environment variable name which is passed to System.get_env/2.
An example configuration might be:
config :ex_money,
exchange_rate_service: {:system, "RATE_SERVICE"},
exchange_rates_retrieve_every: {:system, "RETRIEVE_EVERY"},
## Open Exchange Rates
If you plan to use the provided Open Exchange Rates module
to retrieve exchange rates then you should also provide the additional
configuration key for `app_id`:
config :ex_money,
open_exchange_rates_app_id: "your_app_id"
or configure it via environment variable:
config :ex_money,
open_exchange_rates_app_id: {:system, "OPEN_EXCHANGE_RATES_APP_ID"}
The default exchange rate retrieval module is provided in
`Money.ExchangeRates.OpenExchangeRates` which can be used
as a example to implement your own retrieval module for
other services.
## Managing the configuration at runtime
During exchange rate service startup, the function `init/1` is called
on the configuration exchange rate retrieval module. This module is
expected to return an updated configuration allowing a developer to
customise how the configuration is to be managed. See the implementation
at `Money.ExchangeRates.OpenExchangeRates.init/1` for an example.
"""
@type t :: %{Money.currency_code() => Decimal.t()}
@doc """
Invoked to return the latest exchange rates from the configured
exchange rate retrieval service.
  * `config` is a `%Money.ExchangeRates.Config{}` struct
Returns `{:ok, map_of_rates}` or `{:error, reason}`
"""
@callback get_latest_rates(config :: Money.ExchangeRates.Config.t()) ::
{:ok, map()} | {:error, binary}
@doc """
Invoked to return the historic exchange rates from the configured
exchange rate retrieval service.
  * `config` is a `%Money.ExchangeRates.Config{}` struct
Returns `{:ok, map_of_historic_rates}` or `{:error, reason}`
"""
@callback get_historic_rates(Date.t(), config :: Money.ExchangeRates.Config.t()) ::
{:ok, map()} | {:error, binary}
@doc """
Decode the body returned from the API request and
  return a map of rates. The map of rates must have
  upcased atom keys representing ISO 4217 currency
  codes, and each value must be a Decimal number.
"""
@callback decode_rates(any) :: map()
@doc """
Given the default configuration, returns an updated configuration at runtime
during exchange rates service startup.
This callback is optional. If the callback is not defined, the default
configuration returned by `Money.ExchangeRates.default_config/0` is used.
* `config` is the configuration returned by `Money.ExchangeRates.default_config/0`
The callback is expected to return a `%Money.ExchangeRates.Config.t()` struct
which may have been updated. The configuration key `:retriever_options` is
available for any service-specific configuration.
"""
@callback init(config :: Money.ExchangeRates.Config.t()) :: Money.ExchangeRates.Config.t()
@optional_callbacks init: 1
require Logger
import Money.ExchangeRates.Cache
alias Money.ExchangeRates.Retriever
@default_retrieval_interval :never
@default_callback_module Money.ExchangeRates.Callback
@default_api_module Money.ExchangeRates.OpenExchangeRates
@default_cache_module Money.ExchangeRates.Cache.Ets
@doc """
Returns the configuration for `ex_money` including the
configuration merged from the configured exchange rates
retriever module.
"""
def config do
api_module = default_config().api_module
if function_exported?(api_module, :init, 1) do
api_module.init(default_config())
else
default_config()
end
end
# Defines the configuration for the exchange rates mechanism.
defmodule Config do
@type t :: %__MODULE__{
retrieve_every: non_neg_integer | nil,
api_module: module() | nil,
callback_module: module() | nil,
log_levels: map(),
preload_historic_rates: Date.t() | Date.Range.t() | {Date.t(), Date.t()} | nil,
retriever_options: map() | nil,
cache_module: module() | nil,
verify_peer: boolean()
}
defstruct retrieve_every: nil,
api_module: nil,
callback_module: nil,
log_levels: %{},
preload_historic_rates: nil,
retriever_options: nil,
cache_module: nil,
verify_peer: true
end
@doc """
Returns the default configuration for the exchange rates retriever.
"""
def default_config do
%Config{
api_module: Money.get_env(:api_module, @default_api_module, :module),
callback_module: Money.get_env(:callback_module, @default_callback_module, :module),
preload_historic_rates: Money.get_env(:preload_historic_rates, nil),
cache_module: Money.get_env(:exchange_rates_cache_module, @default_cache_module, :module),
retrieve_every:
Money.get_env(:exchange_rates_retrieve_every, @default_retrieval_interval, :maybe_integer),
log_levels: %{
success: Money.get_env(:log_success, nil),
failure: Money.get_env(:log_failure, :warn),
info: Money.get_env(:log_info, :info)
},
verify_peer: Money.get_env(:verify_peer, true, :boolean)
}
end
@doc """
Return the latest exchange rates.
Returns:
* `{:ok, rates}` if exchange rates are successfully retrieved. `rates` is a map of
exchange rates.
* `{:error, reason}` if no exchange rates can be returned.
  This function looks up the latest exchange rates in an ETS table
called `:exchange_rates`. The actual retrieval of rates is requested
through `Money.ExchangeRates.Retriever.latest_rates/0`.
"""
@spec latest_rates() :: {:ok, map()} | {:error, {Exception.t(), binary}}
def latest_rates do
case cache().latest_rates() do
{:ok, rates} -> {:ok, rates}
{:error, _} -> Retriever.latest_rates()
end
end
@doc """
Return historic exchange rates.
* `date` is a date returned by `Date.new/3` or any struct with the
elements `:year`, `:month` and `:day`.
Returns:
* `{:ok, rates}` if exchange rates are successfully retrieved. `rates` is a map of
exchange rates.
* `{:error, reason}` if no exchange rates can be returned.
  **Note:** all dates are expected to be in the Calendar.ISO calendar.
  This function looks up the historic exchange rates in an ETS table
called `:exchange_rates`. The actual retrieval of rates is requested
through `Money.ExchangeRates.Retriever.historic_rates/1`.
"""
@spec historic_rates(Date.t()) :: {:ok, map()} | {:error, {Exception.t(), binary}}
def historic_rates(date) do
case cache().historic_rates(date) do
{:ok, rates} -> {:ok, rates}
{:error, _} -> Retriever.historic_rates(date)
end
end
@doc """
Returns `true` if the latest exchange rates are available
and false otherwise.
"""
@spec latest_rates_available?() :: boolean
def latest_rates_available? do
case cache().latest_rates() do
{:ok, _rates} -> true
_ -> false
end
end
@doc """
Return the timestamp of the last successful retrieval of exchange rates or
`{:error, reason}` if no timestamp is known.
## Example
Money.ExchangeRates.last_updated
#> {:ok,
%DateTime{calendar: Calendar.ISO, day: 20, hour: 12, microsecond: {731942, 6},
minute: 36, month: 11, second: 6, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}}
"""
@spec last_updated() :: {:ok, DateTime.t()} | {:error, {Exception.t(), binary}}
def last_updated do
cache().last_updated()
end
end
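# A minimal usage sketch (assumes the exchange rate service is enabled and has
# completed at least one retrieval):
#
#     {:ok, rates} = Money.ExchangeRates.latest_rates()
#     rates[:USD]
#     # => a Decimal rate, keyed by upcased ISO 4217 currency code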
# ---- lib/money/exchange_rates.ex ----
defmodule Exonerate.Filter.DependentSchemas do
@moduledoc false
# NB "dependentSchemas" is just a repackaging of "dependencies" except only permitting the
# maps (specification of full schema to be applied to the object)
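  # An illustrative JSON Schema fragment (hypothetical): when the object has a
  # "credit_card" property, the paired schema is applied to the whole object:
  #
  #     {
  #       "dependentSchemas": {
  #         "credit_card": {"required": ["billing_address"]}
  #       }
  #     }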
@behaviour Exonerate.Filter
@derive Exonerate.Compiler
@derive {Inspect, except: [:context]}
alias Exonerate.Type.Object
alias Exonerate.Validator
defstruct [:context, :dependencies]
import Validator, only: [fun: 2]
def parse(artifact = %Object{context: context}, %{"dependentSchemas" => deps}) do
deps = deps
|> Enum.reject(&(elem(&1, 1) == true)) # as an optimization, just ignore {key, true}
|> Map.new(fn
{k, false} -> {k, false} # might be optimizable as a filter. Not done here.
{k, schema} when is_map(schema) ->
{k, Validator.parse(
context.schema,
[k, "dependentSchemas" | context.pointer],
authority: context.authority,
format: context.format,
draft: context.draft
)}
end)
%{
artifact |
pipeline: [fun(artifact, "dependentSchemas") | artifact.pipeline],
filters: [%__MODULE__{context: context, dependencies: deps} | artifact.filters]
}
end
def compile(filter = %__MODULE__{dependencies: deps}) do
{pipeline, children} = deps
|> Enum.map(fn
{key, false} ->
{fun(filter, ["dependentSchemas", key]),
quote do
defp unquote(fun(filter, ["dependentSchemas", key]))(value, path) when is_map_key(value, unquote(key)) do
Exonerate.mismatch(value, Path.join(path, unquote(key)))
end
defp unquote(fun(filter, ["dependentSchemas", key]))(value, _), do: value
end}
{key, schema} ->
{fun(filter, ["dependentSchemas", ":" <> key]),
quote do
defp unquote(fun(filter, ["dependentSchemas", ":" <> key]))(value, path) when is_map_key(value, unquote(key)) do
unquote(fun(filter, ["dependentSchemas", key]))(value, path)
end
defp unquote(fun(filter, ["dependentSchemas", ":" <> key]))(value, _), do: value
unquote(Validator.compile(schema))
end}
end)
|> Enum.unzip
{[], [
quote do
defp unquote(fun(filter, "dependentSchemas"))(value, path) do
Exonerate.pipeline(value, path, unquote(pipeline))
:ok
end
end
] ++ children}
end
end
# ---- lib/exonerate/filter/dependent_schemas.ex ----
defmodule Bamboo.SendGridHelper do
@moduledoc """
  Functions for using features specific to SendGrid.
## Example
email
|> with_template("80509523-83de-42b6-a2bf-54b7513bd2aa")
|> substitute("%name%", "<NAME>")
|> substitute("%location%", "Westeros")
"""
alias Bamboo.Email
@field_name :send_grid_template
@categories :categories
@asm_group_id :asm_group_id
@bypass_list_management :bypass_list_management
@google_analytics_enabled :google_analytics_enabled
@google_analytics_utm_params :google_analytics_utm_params
@additional_personalizations :additional_personalizations
@allowed_google_analytics_utm_params ~w(utm_source utm_medium utm_campaign utm_term utm_content)a
@send_at_field :sendgrid_send_at
@ip_pool_name_field :ip_pool_name
@doc """
Specify the template for SendGrid to use for the context of the substitution
tags.
## Example
email
|> with_template("80509523-83de-42b6-a2bf-54b7513bd2aa")
"""
def with_template(email, template_id) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, set_template(template, template_id))
end
@doc """
Add a tag to the list of substitutions in the SendGrid template.
The tag must be a `String.t` due to SendGrid using special characters to wrap
tags in the template.
## Example
email
|> substitute("%name%", "<NAME>")
"""
def substitute(email, tag, value) do
if is_binary(tag) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, add_substitution(template, tag, value))
else
raise "expected the tag parameter to be of type binary, got #{tag}"
end
end
@doc """
An array of category names for this email. A maximum of 10 categories can be assigned to an email.
Duplicate categories will be ignored and only unique entries will be sent.
## Example
email
|> with_categories("campaign-12345")
"""
def with_categories(email, categories) when is_list(categories) do
categories =
(Map.get(email.private, @categories, []) ++ categories)
|> MapSet.new()
|> MapSet.to_list()
email
|> Email.put_private(@categories, Enum.slice(categories, 0, 10))
end
def with_categories(_email, _categories) do
raise "expected a list of category strings"
end
@doc """
Add a property to the list of dynamic template data in the SendGrid template.
This will be added to the request as:
```
"personalizations":[
{
"to":[
{
"email":"<EMAIL>"
}
],
"dynamic_template_data":{
"total":"$ 239.85",
}
}
],
```
The value can be of any type, since SendGrid allows you to use Handlebars in its templates.
## Example
email
|> add_dynamic_field("name", "<NAME>")
"""
def add_dynamic_field(email, field, value) when is_atom(field),
do: add_dynamic_field(email, Atom.to_string(field), value)
def add_dynamic_field(email, field, value) when is_binary(field) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, add_dynamic_field_to_template(template, field, value))
end
def add_dynamic_field(_email, field, _value),
do: raise("expected the field parameter to be of type binary or atom, got #{inspect(field)}")
@doc """
An integer id for an ASM (Advanced Suppression Manager) group that this email should belong to.
This can be used to let recipients unsubscribe from only a certain type of communication.
## Example
email
|> with_asm_group_id(1234)
"""
def with_asm_group_id(email, asm_group_id) when is_integer(asm_group_id) do
email
|> Email.put_private(@asm_group_id, asm_group_id)
end
def with_asm_group_id(_email, asm_group_id) do
raise "expected the asm_group_id parameter to be an integer, got #{asm_group_id}"
end
@doc """
A boolean setting to instruct SendGrid to bypass list management for this
email. If enabled, SendGrid will ignore any email suppression (such as
unsubscribes, bounces, or spam filters) for this email. This is useful for
emails that all users must receive, such as Terms of Service updates or
password resets.
## Example
email
|> with_bypass_list_management(true)
"""
def with_bypass_list_management(email, enabled) when is_boolean(enabled) do
email
|> Email.put_private(@bypass_list_management, enabled)
end
def with_bypass_list_management(_email, enabled) do
raise "expected bypass_list_management parameter to be a boolean, got #{enabled}"
end
@doc """
Instruct SendGrid to enable or disable Google Analytics tracking, and
optionally set the UTM parameters for it. This is useful if you need to
control UTM tracking parameters on an individual email basis.
## Example
email
|> with_google_analytics(true, %{utm_source: "email", utm_campaign: "campaign"})
email
|> with_google_analytics(false)
"""
def with_google_analytics(email, enabled, utm_params \\ %{})
def with_google_analytics(email, enabled, utm_params)
when is_boolean(enabled) do
utm_params =
utm_params
|> Map.take(@allowed_google_analytics_utm_params)
email
|> Email.put_private(@google_analytics_enabled, enabled)
|> Email.put_private(@google_analytics_utm_params, utm_params)
end
def with_google_analytics(_email, _enabled, _utm_params) do
raise "expected with_google_analytics enabled parameter to be a boolean"
end
@doc """
Schedule a time for SendGrid to deliver the email.
Note that if the time is in the past, SendGrid will immediately deliver the
email.
## Example
{:ok, delivery_time, _} = DateTime.from_iso8601("2020-01-01T00:00:00Z")
email
|> with_send_at(delivery_time)
"""
@spec with_send_at(%Email{}, %DateTime{} | integer()) :: %Email{}
def with_send_at(email, %DateTime{} = time) do
timestamp = DateTime.to_unix(time)
email
|> Email.put_private(@send_at_field, timestamp)
end
def with_send_at(email, unix_timestamp) when is_integer(unix_timestamp) do
email
|> Email.put_private(@send_at_field, unix_timestamp)
end
def with_send_at(_email, _time) do
raise "expected with_send_at time parameter to be a DateTime or unix timestamp"
end
@doc """
Add SendGrid personalizations
Each personalization can have the following fields: `to`, `cc`, `bcc`,
`subject`, `headers`, `substitutions`, `custom_args`, or `send_at`.
Settings from the top level of the email (e.g., `email |> with_send_at(...)`)
will not be applied to each personalization. If you want multiple
personalizations with common properties, generate the list from a common
base value and leave the corresponding top-level fields unset.
## Example:
base_personalization = %{
bcc: [%{"email" => "<EMAIL>", "name" => "BCC"}],
subject: "Here is your email"
}
personalizations =
Enum.map(
[
%{to: "<EMAIL>"},
%{to: "<EMAIL>", send_at: 1_580_485_560}
],
&Map.merge(base_personalization, &1)
)
email =
new_email()
|> Email.put_header("Reply-To", "<EMAIL>")
|> Bamboo.SendGridHelper.add_personalizations(personalizations)
"""
@spec add_personalizations(Bamboo.Email.t(), [map]) :: Bamboo.Email.t()
def add_personalizations(email, personalizations) when is_list(personalizations) do
email
|> Email.put_private(@additional_personalizations, personalizations)
end
defp set_template(template, template_id) do
template
|> Map.merge(%{template_id: template_id})
end
defp add_substitution(template, tag, value) do
template
|> Map.update(:substitutions, %{tag => value}, fn substitutions ->
Map.merge(substitutions, %{tag => value})
end)
end
defp add_dynamic_field_to_template(template, field, value) do
template
|> Map.update(:dynamic_template_data, %{field => value}, fn dynamic_data ->
Map.merge(dynamic_data, %{field => value})
end)
end
@doc """
Specify the ip pool name.
## Example
email
|> with_ip_pool_name("my-ip-pool-name")
"""
def with_ip_pool_name(email, ip_pool_name) do
email
|> Email.put_private(@ip_pool_name_field, ip_pool_name)
end
end
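# Illustrative composition sketch (hedged: `new_email/0`, the addresses, and the
# template id are hypothetical; adapter configuration is assumed from a typical
# Bamboo setup):
#
#     import Bamboo.Email
#
#     new_email(to: "user@example.com", from: "noreply@example.com")
#     |> Bamboo.SendGridHelper.with_template("80509523-83de-42b6-a2bf-54b7513bd2aa")
#     |> Bamboo.SendGridHelper.add_dynamic_field("name", "Jane")
#     |> Bamboo.SendGridHelper.with_categories(["onboarding"])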
# File: lib/bamboo/adapters/send_grid_helper.ex
defmodule DiodeClient.ABI do
alias DiodeClient.{Hash, Wallet}
use DiodeClient.Log
import Wallet
def encode_args(types, values) when is_list(types) and is_list(values) do
encode_data(types, values)
|> :erlang.iolist_to_binary()
end
def decode_types(types, data) do
{ret, ""} =
Enum.reduce(types, {[], data}, fn type, {ret, rest} ->
{value, rest} = decode(type, rest)
{ret ++ [value], rest}
end)
ret
end
def decode_revert(<<"">>) do
{:evmc_revert, ""}
end
# Decoding "Error(string)" type revert messages
def decode_revert(
<<8, 195, 121, 160, 32::unsigned-size(256), length::unsigned-size(256), rest::binary>>
) do
{:evmc_revert, binary_part(rest, 0, length)}
end
def decode_revert(other) do
log("DEBUG: decode_revert(~0p)", [other])
{:evmc_revert, "blubb"}
end
def encode_spec(name, types \\ []) do
signature = "#{name}(#{Enum.join(types, ",")})"
binary_part(Hash.keccak_256(signature), 0, 4)
end
def encode_call(name, types \\ [], values \\ []) do
fun = encode_spec(name, types)
args = encode_args(types, values)
fun <> args
end
def do_encode_data(type, value) do
subtype = subtype(type)
if subtype != nil do
{types, values, len} = dynamic(type, value)
ret = encode_data(types, values)
{"", [encode("uint", len), ret]}
else
{encode(type, value), ""}
end
end
def encode_data(subtypes, values) do
values =
Enum.zip([subtypes, values])
|> Enum.map(fn {type, entry} ->
do_encode_data(type, entry)
end)
{head, body, _} =
Enum.reduce(values, {[], [], 32 * length(subtypes)}, fn
{"", body}, {h, b, o} ->
{h ++ [encode("uint", o)], b ++ [body], o + :erlang.iolist_size(body)}
{head, _}, {h, b, o} ->
{h ++ [head], b, o}
end)
[head, body]
end
@doc """
subtype returns the individual element type of a dynamic/array type, or nil for static types
"""
@spec subtype(binary) :: nil | binary
def subtype(type) do
cond do
String.ends_with?(type, "[]") -> binary_part(type, 0, byte_size(type) - 2)
type == "bytes" -> "uint8"
type == "string" -> "uint8"
true -> nil
end
end
def dynamic(type, values) when is_list(values) do
{List.duplicate(subtype(type), length(values)), values, length(values)}
end
def dynamic(type, {:call, name, types, args}) do
dynamic(type, encode_call(name, types, args))
end
def dynamic(_type, value) when is_binary(value) do
values = value <> <<0::unsigned-size(248)>>
values =
binary_part(values, 0, div(byte_size(values), 32) * 32)
|> :erlang.binary_to_list()
|> Enum.chunk_every(32)
|> Enum.map(&:erlang.iolist_to_binary/1)
{List.duplicate("bytes32", length(values)), values, byte_size(value)}
end
def encode(format, nil), do: encode(format, 0)
# uint<M>: unsigned integer type of M bits, 0 < M <= 256, M % 8 == 0. e.g. uint32, uint8, uint256.
# int<M>: two's complement signed integer type of M bits, 0 < M <= 256, M % 8 == 0.
# address: equivalent to uint160, except for the assumed interpretation and language typing. For computing the function selector, address is used.
# uint, int: synonyms for uint256, int256 respectively. For computing the function selector, uint256 and int256 have to be used.
# bool: equivalent to uint8 restricted to the values 0 and 1. For computing the function selector, bool is used.
# fixed<M>x<N>: signed fixed-point decimal number of M bits, 8 <= M <= 256, M % 8 ==0, and 0 < N <= 80, which denotes the value v as v / (10 ** N).
# ufixed<M>x<N>: unsigned variant of fixed<M>x<N>.
# fixed, ufixed: synonyms for fixed128x18, ufixed128x18 respectively. For computing the function selector, fixed128x18 and ufixed128x18 have to be used.
# bytes<M>: binary type of M bytes, 0 < M <= 32.
# function: an address (20 bytes) followed by a function selector (4 bytes). Encoded identical to bytes24.
for bit <- 1..32 do
Module.eval_quoted(
__MODULE__,
Code.string_to_quoted("""
def encode("uint#{bit * 8}", value), do: <<value :: unsigned-size(256)>>
def encode("int#{bit * 8}", value), do: <<value :: signed-size(256)>>
def encode("bytes#{bit}", <<value :: binary>>), do: <<:binary.decode_unsigned(value) :: unsigned-size(256)>>
def encode("bytes#{bit}", value) when is_integer(value), do: <<value :: unsigned-size(256)>>
""")
)
end
def encode("uint", value), do: encode("uint256", value)
def encode("int", value), do: encode("int256", value)
def encode("address", value) when is_integer(value), do: encode("uint160", value)
def encode("address", value) when is_binary(value), do: encode("bytes20", value)
def encode("address", value = wallet()), do: encode("bytes20", Wallet.address!(value))
def encode("bool", true), do: encode("uint8", 1)
def encode("bool", false), do: encode("uint8", 0)
def encode("bool", value), do: encode("uint8", value)
def encode("function", {address, name}),
do: encode("bytes24", encode("address", address) <> encode_spec(name))
def encode("function", {address, name, types}),
do: encode("bytes24", encode("address", address) <> encode_spec(name, types))
def encode("function", value), do: encode("bytes24", value)
# next one should be cut off at bit limit
for bit <- 1..32 do
Module.eval_quoted(
__MODULE__,
Code.string_to_quoted("""
def decode("uint#{bit * 8}", <<value :: unsigned-size(256), rest :: binary>>), do: {value, rest}
def decode("int#{bit * 8}", <<value :: signed-size(256), rest :: binary>>), do: {value, rest}
def decode("bytes#{bit}", <<value :: binary-size(#{bit}), _ :: binary-size(#{32 - bit}), rest :: binary()>>), do: {value, rest}
""")
)
end
def decode("uint", value), do: decode("uint256", value)
def decode("int", value), do: decode("int256", value)
def decode("address", value), do: decode("bytes20", value)
def decode("bool", value), do: decode("uint8", value)
end
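# Illustrative sketch (the call and arguments are hypothetical): ABI-encoding
# `transfer(address,uint256)`. encode_spec/2 yields the 4-byte selector; each
# static argument is padded to 32 bytes per the Solidity ABI.
#
#     DiodeClient.ABI.encode_call("transfer", ["address", "uint256"], [
#       <<0x1234::unsigned-size(160)>>,
#       1_000_000
#     ])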
# File: lib/diode_client/abi.ex
defmodule Snitch.Data.Schema.PackageItem do
@moduledoc """
Models a PackageItem, a `Package` is composed of many `PackageItem`s.
## Fulfillment
There are two kinds of fulfillments:
1. `immediate`, there is enough stock "on hand" at the origin stock location.
- `:backordered` is set to `false`
2. `deferred`, there is not enough stock "on hand" (possibly even none),
***and*** the origin stock location allows backorders on the requested
variant/product.
- `:backordered` is set to `true`
For a detailed explanation of how backorders work, please refer to the [guide on
setting up stock locations](#).
> The *origin stock location* is the location which would ship the package
containing this package-item.
### Immediate Fulfillment
If the `:backordered?` field is `false`, the package item immediately fulfills
the line item.
This also implies the following:
```
package_item.delta = 0
```
### Deferred Fulfillment
If the `:backordered?` field is `true`, the package item _will fulfill_ the
line item, in the future. The (parent) package cannot be immediately shipped.
This also implies the following:
```
package_item.delta = package_item.line_item.quantity - currently_on_hand
```
"""
use Snitch.Data.Schema
alias Ecto.Nanoid
alias Snitch.Data.Schema.{LineItem, Package, Product}
@typedoc """
Every fulfilled `LineItem` gets shipped as a `PackageItem` in a `Package`.
## Fields
### `:quantity`
The number of units (of this item) that are currently "on hand" at the stock
location. The package can be shipped only when this becomes equal to the
quantity ordered.
When the item is immediately fulfilled, this is same as the line_item's
quantity.
Otherwise, this is the number of units that are currently "on hand" at the
origin stock location.
### `:delta`
The difference between the quantity ordered by the line item and the number
of units currently "on hand".
### `:tax`
The tax levied over (or included in) the cost of the line item, as applicable
when the line item is sold from the `:origin` stock location.
This does not include any shipping tax components.
### `:shipping_tax`
The sum of all shipping taxes that apply for the shipping of this item from
the `origin` stock location.
"""
@type t :: %__MODULE__{}
# TODO: :backordered could be made a virtual field...
schema "snitch_package_items" do
field(:number, Nanoid, autogenerate: true)
field(:state, :string)
field(:quantity, :integer, default: 0)
# The field should be tracked in some other way
field(:delta, :integer, default: 0)
field(:backordered?, :boolean)
field(:tax, Money.Ecto.Composite.Type)
field(:shipping_tax, Money.Ecto.Composite.Type)
belongs_to(:product, Product)
belongs_to(:line_item, LineItem)
belongs_to(:package, Package)
has_one(:order, through: [:package, :order])
timestamps()
end
@create_fields ~w(state delta quantity line_item_id product_id package_id tax shipping_tax)a
@required_fields ~w(state quantity line_item_id product_id tax)a
@update_fields ~w(state quantity delta tax shipping_tax)a
@doc """
Returns a `PackageItem` changeset to create a new `package_item`.
"""
@spec create_changeset(t, map) :: Ecto.Changeset.t()
def create_changeset(%__MODULE__{} = package_item, params) do
package_item
|> cast(params, @create_fields)
|> validate_required(@required_fields)
|> foreign_key_constraint(:line_item_id)
|> foreign_key_constraint(:product_id)
|> foreign_key_constraint(:package_id)
|> unique_constraint(:number)
|> common_changeset()
end
@doc """
Returns a `PackageItem` changeset to update the `package_item`.
"""
@spec update_changeset(t, map) :: Ecto.Changeset.t()
def update_changeset(%__MODULE__{} = package_item, params) do
package_item
|> cast(params, @update_fields)
|> common_changeset()
end
defp common_changeset(package_item_changeset) do
package_item_changeset
|> validate_number(:quantity, greater_than: -1)
|> validate_number(:delta, greater_than: -1)
|> validate_amount(:tax)
|> validate_amount(:shipping_tax)
|> set_backordered()
end
defp set_backordered(%Ecto.Changeset{valid?: true} = changeset) do
case fetch_field(changeset, :delta) do
{_, delta} when delta == 0 ->
put_change(changeset, :backordered?, false)
{_, delta} when delta > 0 ->
put_change(changeset, :backordered?, true)
_ ->
changeset
end
end
defp set_backordered(%Ecto.Changeset{} = cs), do: cs
end
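# Illustrative changeset sketch (all values hypothetical): with delta > 0,
# common_changeset/1 marks the item as backordered via set_backordered/1.
#
#     %Snitch.Data.Schema.PackageItem{}
#     |> Snitch.Data.Schema.PackageItem.create_changeset(%{
#       state: "pending",
#       quantity: 2,
#       delta: 3,
#       line_item_id: 1,
#       product_id: 42,
#       tax: Money.new(:USD, 0)
#     })
#     # => an Ecto.Changeset with backordered?: true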
# File: apps/snitch_core/lib/core/data/schema/package_item.ex
defmodule AdventOfCode.Y2021.Day1 do
@moduledoc """
--- Day 1: Sonar Sweep ---
You're minding your own business on a ship at sea when the overboard alarm goes off! You rush to see if you can help. Apparently, one of the Elves tripped and accidentally sent the sleigh keys flying into the ocean!
Before you know it, you're inside a submarine the Elves keep ready for situations like this. It's covered in Christmas lights (because of course it is), and it even has an experimental antenna that should be able to track the keys if you can boost its signal strength high enough; there's a little meter that indicates the antenna's signal strength by displaying 0-50 stars.
Your instincts tell you that in order to save Christmas, you'll need to get all fifty stars by December 25th.
Collect stars by solving puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
As the submarine drops below the surface of the ocean, it automatically performs a sonar sweep of the nearby sea floor. On a small screen, the sonar sweep report (your puzzle input) appears: each line is a measurement of the sea floor depth as the sweep looks further and further away from the submarine.
For example, suppose you had the following report:
199
200
208
210
200
207
240
269
260
263
This report indicates that, scanning outward from the submarine, the sonar sweep found depths of 199, 200, 208, 210, and so on.
The first order of business is to figure out how quickly the depth increases, just so you know what you're dealing with - you never know if the keys will get carried into deeper water by an ocean current or a fish or something.
To do this, count the number of times a depth measurement increases from the previous measurement. (There is no measurement before the first measurement.) In the example above, the changes are as follows:
```
199 (N/A - no previous measurement)
200 (increased)
208 (increased)
210 (increased)
200 (decreased)
207 (increased)
240 (increased)
269 (increased)
260 (decreased)
263 (increased)
```
In this example, there are 7 measurements that are larger than the previous measurement.
How many measurements are larger than the previous measurement?
"""
@doc """
Day 1 - Part 1
## Examples
iex> AdventOfCode.Y2021.Day1.part1()
1548
"""
def fetch_inputs() do
AdventOfCode.etl_file("lib/y_2021/d1/input.txt", &parse_row/1)
end
def parse_row(s) do
get_int(Integer.parse(s), s)
end
def part1 do
inputs = fetch_inputs()
iterate(nil, inputs, 0)
end
defp get_int({n, ""}, _), do: n
defp get_int(_, s), do: raise(ArgumentError, "could not parse integer from: #{inspect(s)}")
defp compare_and_count(nil, _, count), do: count
defp compare_and_count(_, nil, count), do: count
defp compare_and_count(prev, next, count) when next > prev, do: count + 1
defp compare_and_count(_, _, count), do: count
defp iterate(_, [], count), do: count
defp iterate(previous, [next | tail], count) do
ncount = compare_and_count(previous, next, count)
iterate(next, tail, ncount)
end
@doc """
Day 1 - Part 2
--- Part Two ---
Considering every single measurement isn't as useful as you expected: there's just too much noise in the data.
Instead, consider sums of a three-measurement sliding window. Again considering the above example:
199 A
200 A B
208 A B C
210 B C D
200 E C D
207 E F D
240 E F G
269 F G H
260 G H
263 H
Start by comparing the first and second three-measurement windows. The measurements in the first window are marked A (199, 200, 208); their sum is 199 + 200 + 208 = 607. The second window is marked B (200, 208, 210); its sum is 618. The sum of measurements in the second window is larger than the sum of the first, so this first comparison increased.
Your goal now is to count the number of times the sum of measurements in this sliding window increases from the previous sum. So, compare A with B, then compare B with C, then C with D, and so on. Stop when there aren't enough measurements left to create a new three-measurement sum.
In the above example, the sum of each three-measurement window is as follows:
A: 607 (N/A - no previous sum)
B: 618 (increased)
C: 618 (no change)
D: 617 (decreased)
E: 647 (increased)
F: 716 (increased)
G: 769 (increased)
H: 792 (increased)
In this example, there are 5 sums that are larger than the previous sum.
Consider sums of a three-measurement sliding window. How many sums are larger than the previous sum?
## Examples
iex> AdventOfCode.Y2021.Day1.part2()
1589
"""
def part2() do
inputs = fetch_inputs()
iterate_window(nil, nil, nil, inputs, 0)
end
def iterate_window(_, _, _, [], count), do: count
def iterate_window(p1, p2, p3, [n1 | tail], count)
when is_integer(p1) and is_integer(p2) and is_integer(p3) do
s1 = p1 + p2 + p3
s2 = p2 + p3 + n1
new_count = window_compare(s1, s2, count)
iterate_window(p2, p3, n1, tail, new_count)
end
def iterate_window(_p1, p2, p3, [n1 | tail], count) do
iterate_window(p2, p3, n1, tail, count)
end
def window_compare(s1, s2, count) when s2 > s1, do: count + 1
def window_compare(_, _, count), do: count
end
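# Note (illustrative): comparing adjacent three-measurement window sums
# simplifies algebraically, since
# s2 - s1 = (p2 + p3 + n1) - (p1 + p2 + p3) = n1 - p1.
# window_compare(s1, s2, count) above is therefore equivalent to checking
# whether the incoming element n1 exceeds the outgoing element p1.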
# File: lib/y_2021/d1/day1.ex
defmodule Plaid.Accounts do
@moduledoc """
Functions for Plaid `accounts` endpoint.
"""
import Plaid, only: [make_request_with_cred: 4, validate_cred: 1]
alias Plaid.Utils
@derive Jason.Encoder
defstruct accounts: [], item: nil, request_id: nil
@type t :: %__MODULE__{
accounts: [Plaid.Accounts.Account.t()],
item: Plaid.Item.t(),
request_id: String.t()
}
@type params :: %{required(atom) => String.t() | [String.t()] | map}
@type config :: %{required(atom) => String.t()}
@endpoint :accounts
defmodule Account do
@moduledoc """
Plaid Account data structure.
"""
@derive Jason.Encoder
defstruct account_id: nil,
balances: nil,
owners: nil,
name: nil,
mask: nil,
official_name: nil,
type: nil,
subtype: nil
@type t :: %__MODULE__{
account_id: String.t(),
balances: Plaid.Accounts.Account.Balance.t(),
owners: [Plaid.Accounts.Account.Owner.t()],
name: String.t(),
mask: String.t(),
official_name: String.t(),
type: String.t(),
subtype: String.t()
}
defmodule Balance do
@moduledoc """
Plaid Account Balance data structure.
"""
@derive Jason.Encoder
defstruct available: nil,
current: nil,
limit: nil,
iso_currency_code: nil,
unofficial_currency_code: nil
@type t :: %__MODULE__{
available: float,
current: float,
limit: float,
iso_currency_code: String.t(),
unofficial_currency_code: String.t()
}
end
defmodule Owner do
@moduledoc """
Plaid Account Owner data structure.
"""
@derive Jason.Encoder
defstruct addresses: nil,
emails: nil,
names: nil,
phone_numbers: nil
@type t :: %__MODULE__{
addresses: [Plaid.Accounts.Account.Owner.Address.t()],
emails: [Plaid.Accounts.Account.Owner.Email.t()],
names: [String.t()],
phone_numbers: [Plaid.Accounts.Account.Owner.PhoneNumber.t()]
}
defmodule Address do
@moduledoc """
Plaid Account Owner Address data structure.
"""
@derive Jason.Encoder
defstruct data: %{city: nil, region: nil, street: nil, postal_code: nil, country: nil},
primary: false
@type t :: %__MODULE__{
data: %{
city: String.t(),
region: String.t(),
street: String.t(),
postal_code: String.t(),
country: String.t()
},
primary: boolean()
}
end
defmodule Email do
@moduledoc """
Plaid Account Owner Email data structure.
"""
@derive Jason.Encoder
defstruct data: nil,
primary: false,
type: nil
@type t :: %__MODULE__{
data: String.t(),
primary: boolean(),
type: String.t()
}
end
defmodule PhoneNumber do
@moduledoc """
Plaid Account Owner PhoneNumber data structure.
"""
@derive Jason.Encoder
defstruct data: nil,
primary: false,
type: nil
@type t :: %__MODULE__{
data: String.t(),
primary: boolean(),
type: String.t()
}
end
end
end
@doc """
Gets account data associated with an Item.
Parameters
```
%{access_token: "access-token"}
```
"""
@spec get(params, config | nil) :: {:ok, Plaid.Accounts.t()} | {:error, Plaid.Error.t()}
def get(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/get"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
@doc """
Gets balances for specified accounts associated with an Item.
Parameters
```
%{access_token: "access-token", options: %{account_ids: ["account-id"]}}
```
"""
@spec get_balance(params, config | nil) :: {:ok, Plaid.Accounts.t()} | {:error, Plaid.Error.t()}
def get_balance(params, config \\ %{}) do
config = validate_cred(config)
endpoint = "#{@endpoint}/balance/get"
make_request_with_cred(:post, endpoint, config, params)
|> Utils.handle_resp(@endpoint)
end
end
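# Illustrative usage sketch (the access token is hypothetical; Plaid
# credentials are assumed to be configured for the client):
#
#     {:ok, %Plaid.Accounts{accounts: accounts}} =
#       Plaid.Accounts.get(%{access_token: "access-sandbox-123"})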
# File: lib/plaid/accounts.ex
defmodule Ello.Stream.Slop do
@base_slop_factor Application.get_env(:ello_stream, :base_slop_factor)
@block_slop_multiplier Application.get_env(:ello_stream, :block_slop_multiplier)
@max_block_slop_factor Application.get_env(:ello_stream, :max_block_slop_factor)
@nsfw_slop_factor Application.get_env(:ello_stream, :nsfw_slop_factor)
@nudity_slop_factor Application.get_env(:ello_stream, :nudity_slop_factor)
@cred_slop_factor Application.get_env(:ello_stream, :cred_slop_factor)
def slop_factor(stream, factors \\ %{}) do
base_slop_factor = factors[:base_slop_factor] || @base_slop_factor
block_slop_multiplier = factors[:block_slop_multiplier] || @block_slop_multiplier
max_block_slop_factor = factors[:max_block_slop_factor] || @max_block_slop_factor
nsfw_slop_factor = factors[:nsfw_slop_factor] || @nsfw_slop_factor
nudity_slop_factor = factors[:nudity_slop_factor] || @nudity_slop_factor
cred_slop_factor = factors[:cred_slop_factor] || @cred_slop_factor
base_slop_factor +
calc_nsfw_slop_factor(stream, nsfw_slop_factor) +
calc_nudity_slop_factor(stream, nudity_slop_factor) +
calc_cred_slop_factor(stream, cred_slop_factor) +
calc_blocked_users_slop_factor(stream, block_slop_multiplier, max_block_slop_factor)
end
defp calc_blocked_users_slop_factor(%{current_user: %{all_blocked_ids: blocked}}, block_slop_multiplier, max_block_slop_factor)
when length(blocked) > 0
do
min(length(blocked) * block_slop_multiplier, max_block_slop_factor)
end
defp calc_blocked_users_slop_factor(_, _, _), do: 0.0
defp calc_nsfw_slop_factor(%{allow_nsfw: true}, _), do: 0.0
defp calc_nsfw_slop_factor(_, nsfw_slop_factor), do: nsfw_slop_factor
defp calc_nudity_slop_factor(%{allow_nudity: true}, _), do: 0.0
defp calc_nudity_slop_factor(_, nudity_slop_factor), do: nudity_slop_factor
defp calc_cred_slop_factor(%{require_cred: true}, cred_slop_factor), do: cred_slop_factor
defp calc_cred_slop_factor(_, _), do: 0.0
end
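# Illustrative sketch (factor values hypothetical; in production the defaults
# come from :ello_stream config at compile time):
#
#     stream = %{
#       current_user: %{all_blocked_ids: [1, 2, 3]},
#       allow_nsfw: false,
#       allow_nudity: true,
#       require_cred: false
#     }
#     Ello.Stream.Slop.slop_factor(stream, %{
#       base_slop_factor: 0.2,
#       nsfw_slop_factor: 0.5,
#       nudity_slop_factor: 0.3,
#       cred_slop_factor: 0.1,
#       block_slop_multiplier: 0.01,
#       max_block_slop_factor: 1.0
#     })
#     # => 0.2 + 0.5 + 0.0 + 0.0 + min(3 * 0.01, 1.0) = 0.73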
# File: apps/ello_stream/lib/ello_stream/slop.ex
defmodule Phoenix.Ecto.SQL.Sandbox do
@moduledoc """
A plug to allow concurrent, transactional acceptance tests with Ecto.Adapters.SQL.Sandbox.
## Example
This plug should only be used during tests. First, set a flag to
enable it in `config/test.exs`:
config :your_app, sql_sandbox: true
And use the flag to conditionally add the plug to `lib/your_app/endpoint.ex`:
if Application.get_env(:your_app, :sql_sandbox) do
plug Phoenix.Ecto.SQL.Sandbox
end
Then, within an acceptance test, check out a sandboxed connection as before.
Use the `metadata_for/2` helper to get the session metadata that will allow
access to the test's connection.
Here's an example using [Hound](https://hex.pm/packages/hound):
use Hound.Helpers
setup do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(YourApp.Repo)
metadata = Phoenix.Ecto.SQL.Sandbox.metadata_for(YourApp.Repo, self())
Hound.start_session(metadata: metadata)
end
"""
import Plug.Conn
def init(opts \\ []) do
Keyword.get(opts, :sandbox, Ecto.Adapters.SQL.Sandbox)
end
def call(conn, sandbox) do
conn
|> get_req_header("user-agent")
|> List.first
|> extract_metadata
|> allow_sandbox_access(sandbox)
conn
end
@doc """
Returns metadata to associate with the session
to allow the endpoint to access the database connection checked
out by the test process.
"""
@spec metadata_for(Ecto.Repo.t | [Ecto.Repo.t], pid) :: map
def metadata_for(repo_or_repos, pid) when is_pid(pid) do
%{repo: repo_or_repos, owner: pid}
end
defp allow_sandbox_access(%{repo: repo, owner: owner}, sandbox) do
Enum.each(List.wrap(repo), &sandbox.allow(&1, owner, self()))
end
defp allow_sandbox_access(_metadata, _sandbox), do: nil
defp extract_metadata(user_agent) when is_binary(user_agent) do
ua_last_part = user_agent |> String.split("/") |> List.last
case Regex.run(~r/BeamMetadata \((.*?)\)/, ua_last_part) do
[_, metadata] -> parse_metadata(metadata)
_ -> %{}
end
end
defp extract_metadata(_), do: %{}
defp parse_metadata(encoded_metadata) do
encoded_metadata
|> Base.url_decode64!
|> :erlang.binary_to_term
|> case do
{:v1, metadata} -> metadata
_ -> %{}
end
end
end
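# Illustrative sketch of the wire format parsed by extract_metadata/1 above:
# the test client appends an encoded term to its User-Agent header. (The header
# prefix is arbitrary; only the trailing "BeamMetadata (...)" part matters.)
#
#     metadata = %{repo: YourApp.Repo, owner: self()}
#     encoded = {:v1, metadata} |> :erlang.term_to_binary() |> Base.url_encode64()
#     user_agent = "Mozilla/5.0/BeamMetadata (#{encoded})"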
# File: lib/phoenix_ecto/sql/sandbox.ex
defmodule Bonny.Server.Scheduler.Binding do
@moduledoc """
Kubernetes [binding](#placeholder) interface.
Currently [undocumented](https://github.com/kubernetes/kubernetes/issues/75749) in Kubernetes docs.
## Links
* [Example using curl](https://gist.github.com/kelseyhightower/2349c9c645d32a3fcbe385082de74668)
* [Example using golang](https://banzaicloud.com/blog/k8s-custom-scheduler/)
"""
@json_headers [{"Accept", "application/json"}, {"Content-Type", "application/json"}]
@doc """
Returns a map representing a `Binding` kubernetes resource
## Example
iex> pod = %{"metadata" => %{"name" => "nginx", "namespace" => "default"}}
...> node = %{"metadata" => %{"name" => "kewl-node"}}
iex> Bonny.Server.Scheduler.Binding.new(pod, node)
%{"apiVersion" => "v1", "kind" => "Binding", "metadata" => %{"name" => "nginx", "namespace" => "default"}, "target" => %{"apiVersion" => "v1", "kind" => "Node", "name" => "kewl-node"}}
"""
@spec new(map(), map()) :: map()
def new(pod, node) do
pod_name = K8s.Resource.name(pod)
pod_namespace = K8s.Resource.namespace(pod)
node_name = K8s.Resource.name(node)
%{
"apiVersion" => "v1",
"kind" => "Binding",
"metadata" => %{
"name" => pod_name,
"namespace" => pod_namespace
},
"target" => %{
"apiVersion" => "v1",
"kind" => "Node",
"name" => node_name
}
}
end
@doc """
Performs a POST HTTP request against the pod's binding subresource.
`/api/v1/namespaces/{NAMESPACE}/pods/{POD}/binding`
"""
@spec create(map, atom) :: {:ok, HTTPoison.Response.t()} | {:error, HTTPoison.Error.t()}
def create(binding, cluster) do
pod_name = K8s.Resource.name(binding)
pod_namespace = K8s.Resource.namespace(binding)
node_name = get_in(binding, ["target", "name"])
operation = K8s.Operation.build(:get, "v1", :pod, namespace: pod_namespace, name: pod_name)
with {:ok, base_url} <- K8s.Cluster.url_for(operation, cluster),
{:ok, cluster_connection_config} <- K8s.Cluster.conn(cluster),
{:ok, request_options} <- K8s.Conn.RequestOptions.generate(cluster_connection_config),
{:ok, body} <- Jason.encode(binding),
headers <- request_options.headers ++ @json_headers,
options <- [ssl: request_options.ssl_options] do
metadata = %{pod_name: pod_name, pod_namespace: pod_namespace, node_name: node_name}
{measurements, response} =
Bonny.Sys.Event.measure(HTTPoison, :post, ["#{base_url}/binding", body, headers, options])
case response do
{:ok, resp} ->
Bonny.Sys.Event.scheduler_binding_succeeded(measurements, metadata)
{:ok, resp}
{:error, error} ->
Bonny.Sys.Event.scheduler_binding_failed(measurements, metadata)
{:error, error}
end
end
end
end
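# Illustrative flow sketch (the :default cluster atom and the pod/node maps are
# hypothetical; see the new/2 doctest above for their shape):
#
#     binding = Bonny.Server.Scheduler.Binding.new(pod, node)
#     {:ok, _response} = Bonny.Server.Scheduler.Binding.create(binding, :default)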
# File: lib/bonny/server/scheduler/binding.ex
defmodule AFK.State do
@moduledoc """
A GenServer process representing the current state of the keyboard.
The process can effectively be thought of as a realtime stream manipulator. It
receives key press and key release events (through `press_key/2` and
`release_key/2` respectively) and transforms them into an outgoing event
stream of HID reports.
The process will send messages to the given `:event_receiver` of the form
`{:hid_report, hid_report}`.
"""
use GenServer
alias AFK.ApplyKeycode
alias AFK.State.Keymap
@enforce_keys [
:event_receiver,
:hid_report_mod,
:indexed_keys,
:keymap,
:keys,
:last_hid_report,
:locked_keys,
:modifiers,
:pending_lock?
]
defstruct [
:event_receiver,
:hid_report_mod,
:indexed_keys,
:keymap,
:keys,
:last_hid_report,
:locked_keys,
:modifiers,
:pending_lock?
]
@type t :: %__MODULE__{
event_receiver: pid,
hid_report_mod: atom,
indexed_keys: %{non_neg_integer => {atom, AFK.Keycode.Key.t()}},
keymap: Keymap.t(),
keys: %{atom => AFK.Keycode.t()},
last_hid_report: nil | binary,
locked_keys: [{atom, AFK.Keycode.t()}],
modifiers: [{atom, AFK.Keycode.Modifier.t()}],
pending_lock?: boolean
}
@type args :: [
event_receiver: pid,
keymap: AFK.Keymap.t(),
hid_report_mod: atom
]
# Client
@doc """
Starts the state GenServer.
The three required arguments (in the form of a keyword list) are:
* `:event_receiver` - A PID to send the HID reports to.
* `:keymap` - The keymap to use (see `AFK.Keymap` for details).
* `:hid_report_mod` - A module that implements the `AFK.HIDReport` behaviour.
"""
@spec start_link(args, opts :: GenServer.options()) :: GenServer.on_start()
def start_link(args, opts \\ []) do
event_receiver = Keyword.fetch!(args, :event_receiver)
keymap = Keyword.fetch!(args, :keymap)
hid_report_mod = Keyword.fetch!(args, :hid_report_mod)
state =
struct!(__MODULE__,
event_receiver: event_receiver,
hid_report_mod: hid_report_mod,
indexed_keys: %{},
keymap: Keymap.new(keymap),
keys: %{},
last_hid_report: nil,
locked_keys: [],
modifiers: [],
pending_lock?: false
)
GenServer.start_link(__MODULE__, state, opts)
end
@doc """
Presses a key.
The given key must not already be being pressed, otherwise the server will
crash.
"""
@spec press_key(server :: GenServer.server(), key :: atom) :: :ok
def press_key(server, key) do
GenServer.cast(server, {:press_key, key})
end
@doc """
Releases a key being pressed.
The given key must be being pressed, otherwise the server will crash.
"""
@spec release_key(server :: GenServer.server(), key :: atom) :: :ok
def release_key(server, key) do
GenServer.cast(server, {:release_key, key})
end
# Server
@doc false
@spec init(state :: AFK.State.t()) :: {:ok, AFK.State.t()}
def init(state) do
state = report!(state)
{:ok, state}
end
@doc false
@spec handle_cast(msg :: {:press_key | :release_key, atom}, state :: AFK.State.t()) :: {:noreply, AFK.State.t()}
def handle_cast({:press_key, key}, state) do
if Map.has_key?(state.keys, key), do: raise("Already pressed key pressed again! #{key}")
keycode = Keymap.find_keycode(state.keymap, key)
state = %{state | keys: Map.put(state.keys, key, keycode)}
state = handle_key_locking(state, key, keycode)
state = ApplyKeycode.apply_keycode(keycode, state, key)
state = report!(state)
{:noreply, state}
end
@doc false
def handle_cast({:release_key, key}, %__MODULE__{} = state) do
if !Map.has_key?(state.keys, key), do: raise("Unpressed key released! #{key}")
%{^key => keycode} = state.keys
keys = Map.delete(state.keys, key)
state = %{state | keys: keys}
state =
if keycode in Keyword.get_values(state.locked_keys, key) do
state
else
ApplyKeycode.unapply_keycode(keycode, state, key)
end
state = report!(state)
{:noreply, state}
end
defp handle_key_locking(state, _key, %AFK.Keycode.KeyLock{}), do: state
defp handle_key_locking(%__MODULE__{pending_lock?: true} = state, key, keycode) do
locked_keys = [{key, keycode} | state.locked_keys]
%{state | locked_keys: locked_keys, pending_lock?: false}
end
defp handle_key_locking(state, key, keycode) do
if Keyword.has_key?(state.locked_keys, key) do
locked_keys =
Enum.filter(state.locked_keys, fn
{^key, ^keycode} -> false
{_key, _keycode} -> true
end)
%{state | locked_keys: locked_keys}
else
state
end
end
defp report!(state) do
hid_report = state.hid_report_mod.hid_report(state)
if state.last_hid_report != hid_report do
send(state.event_receiver, {:hid_report, hid_report})
%{state | last_hid_report: hid_report}
else
state
end
end
end
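# Illustrative usage sketch (the keymap and the HID report module are
# hypothetical placeholders; see the AFK docs for concrete values):
#
#     {:ok, pid} =
#       AFK.State.start_link(
#         event_receiver: self(),
#         keymap: my_keymap,
#         hid_report_mod: MyApp.HIDReport
#       )
#     AFK.State.press_key(pid, :k001)
#     # the calling process receives {:hid_report, report} messages
#     AFK.State.release_key(pid, :k001)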
# File: lib/afk/state.ex
defmodule Xandra.Page do
@moduledoc ~S"""
A struct that represents a page of rows.
This struct represents a page of rows that have been returned by the
Cassandra server in response to a query such as `SELECT`, but have not yet
been parsed into Elixir values.
This struct implements the `Enumerable` protocol and is therefore a stream. It
is through this protocol that a `Xandra.Page` struct can be parsed into Elixir
values. The simplest way of getting a list of single rows out of a
`Xandra.Page` struct is to use something like `Enum.to_list/1`. Each element
emitted when streaming out of a `Xandra.Page` struct is a map of string column
names to their corresponding value.
The following fields are public and can be accessed or relied on:
* `paging_state` - the current paging state. Its value can be used to check
whether more pages are available to fetch after the given page.
This is useful when implementing manual paging.
See also the documentation for `Xandra.execute/4`.
## Examples
statement = "SELECT name, age FROM users"
%Xandra.Page{} = page = Xandra.execute!(conn, statement, _params = [])
Enum.each(page, fn %{"name" => name, "age" => age} ->
IO.puts "Read user with name #{name} (age #{age}) out of the database"
end)
"""
defstruct [:content, :columns, :paging_state]
@type paging_state :: binary | nil
@type t :: %__MODULE__{
content: list,
columns: nonempty_list,
paging_state: paging_state
}
defimpl Enumerable do
def reduce(%{content: content, columns: columns}, acc, fun) do
reduce(content, columns, acc, fun)
end
def member?(_page, _value), do: {:error, __MODULE__}
def count(_page), do: {:error, __MODULE__}
def slice(_page), do: {:error, __MODULE__}
defp reduce(_content, _columns, {:halt, acc}, _fun) do
{:halted, acc}
end
defp reduce(content, columns, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(content, columns, &1, fun)}
end
defp reduce([values | rest], columns, {:cont, acc}, fun) do
row = zip(values, columns, []) |> :maps.from_list()
reduce(rest, columns, fun.(row, acc), fun)
end
defp reduce([], _columns, {:cont, acc}, _fun) do
{:done, acc}
end
defp zip([], [], acc), do: acc
defp zip([value | values], [{_, _, name, _} | columns], acc) do
zip(values, columns, [{name, value} | acc])
end
end
defimpl Inspect do
import Inspect.Algebra
def inspect(page, options) do
properties = [
rows: Enum.to_list(page),
more_pages?: page.paging_state != nil
]
concat(["#Xandra.Page<", to_doc(properties, options), ">"])
end
end
end
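# Illustrative manual-paging sketch (connection setup assumed; see the
# moduledoc above):
#
#     page = Xandra.execute!(conn, "SELECT name, age FROM users", [])
#     rows = Enum.to_list(page)
#     more? = page.paging_state != nil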
# File: lib/xandra/page.ex
defmodule Infer.Video do
@moduledoc """
Video type matchers based on the [magic number](https://en.wikipedia.org/wiki/Magic_number_(programming))
"""
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a m4v.
"""
@spec m4v?(binary()) :: boolean()
# matches the "ftypM4V" brand
def m4v?(<<_data::binary-size(4), 0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x56, _rest::binary>>), do: true
def m4v?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a mkv.
## Examples
iex> binary = File.read!("test/videos/sample.mkv")
iex> Infer.Video.mkv?(binary)
true
"""
@spec mkv?(binary()) :: boolean()
def mkv?(<<0x1A, 0x45, 0xDF, 0xA3, 0x93, 0x42, 0x82, 0x88, 0x6D, 0x61, 0x74, 0x72, 0x6F, 0x73, 0x6B, 0x61, _rest::binary>>), do: true
def mkv?(<<_data::binary-size(31), 0x6D, 0x61, 0x74, 0x72, 0x6F, 0x73, 0x6B, 0x61, _rest::binary>>), do: true
def mkv?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a webm video.
## Examples
iex> binary = File.read!("test/videos/sample.webm")
iex> Infer.Video.webm?(binary)
true
"""
@spec webm?(binary()) :: boolean()
def webm?(<<0x1A, 0x45, 0xDF, 0xA3, _rest::binary>>), do: true
def webm?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a mov video.
## Examples
iex> binary = File.read!("test/videos/sample.mov")
iex> Infer.Video.mov?(binary)
true
"""
@spec mov?(binary()) :: boolean()
def mov?(<<0x0, 0x0, 0x0, 0x14, 0x66, 0x74, 0x79, 0x70, _rest::binary>>), do: true
def mov?(<<_data::binary-size(4), 0x6D, 0x6F, 0x6F, 0x76, _rest::binary>>), do: true
def mov?(<<_data::binary-size(4), 0x6D, 0x64, 0x61, 0x74, _rest::binary>>), do: true
def mov?(<<_data::binary-size(12), 0x6D, 0x64, 0x61, 0x74, _rest::binary>>), do: true
def mov?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a AVI video.
## Examples
iex> binary = File.read!("test/videos/sample.avi")
iex> Infer.Video.avi?(binary)
true
"""
@spec avi?(binary()) :: boolean()
def avi?(<<0x52, 0x49, 0x46, 0x46, _data::binary-size(4), 0x41, 0x56, 0x49, _rest::binary>>), do: true
def avi?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a wmv video.
"""
@spec wmv?(binary()) :: boolean()
def wmv?(<<0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, _rest::binary>>), do: true
def wmv?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a mpeg video.
## Examples
iex> binary = File.read!("test/videos/sample.mpeg")
iex> Infer.Video.mpeg?(binary)
true
"""
@spec mpeg?(binary()) :: boolean()
def mpeg?(<<0x0, 0x0, 0x1, check_byte, _rest::binary>>) when check_byte >= 0xB0 and check_byte <= 0xBF, do: true
def mpeg?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a flv video.
"""
@spec flv?(binary()) :: boolean()
def flv?(<<0x46, 0x4C, 0x56, 0x01, _rest::binary>>), do: true
def flv?(_binary), do: false
@doc """
Takes the binary file contents as arguments. Returns `true` if it's a mp4 video.
## Examples
iex> binary = File.read!("test/videos/sample.mp4")
iex> Infer.Video.mp4?(binary)
true
"""
@spec mp4?(binary()) :: boolean()
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "r", "v", "c", "1", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "d", "a", "s", "h", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "2", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "3", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "4", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "5", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "6", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "i", "s", "o", "m", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "m", "m", "p", "4", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "m", "p", "4", "1", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "m", "p", "4", "2", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "m", "p", "4", "v", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "m", "p", "7", "1", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "M", "S", "N", "V", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "A", "S", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "S", "C", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "S", "D", "C", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "S", "H", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "S", "M", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "S", "P", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "S", "S", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "X", "C", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "X", "H", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "X", "M", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "X", "P", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "N", "D", "X", "S", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "F", "4", "V", " ", _rest::binary>>), do: true
def mp4?(<<_data::binary-size(4), "f", "t", "y", "p", "F", "4", "P", " ", _rest::binary>>), do: true
def mp4?(_binary), do: false
end
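# Illustrative note: every mp4?/1 clause above skips the 4-byte box-size prefix
# and matches "ftyp" plus a 4-character brand, so a buffer beginning with
# <<0, 0, 0, 32, "ftypisom">> is recognized as MP4.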
# File: lib/matchers/video.ex
defmodule Dagger.Step do
@moduledoc """
A step represents a recipe of dependent calculations to make for a given input.
Dagger Steps model dependent operations that can be run concurrently by a runner.
A top level step can be run when given the required parameters such as a `runner`, an `input`, a `name`, and a `work` function.
Once a step has been run, dependent steps in the `steps` key are enqueued to be run with the parent step as input.
A data structure of these nested steps can be constructed without root inputs, injected with inputs at runtime,
then dispatched to a runner for processing. The step processor only has to execute `Step.run/1` to start the pipeline of dependent steps.
Assuming the queue passed into the step is valid and feeds into more step processors, the whole pipeline will run.
For example once a step to run a calculation has been run,
a notification to follow up with results can be published via a dependent step.
However, in most cases you would use this module when you want concurrent execution alongside step-by-step dependent execution.
Dependent steps can be added using the `add_step/2` function.
```elixir
example_step_pipeline = %{
tokenization: %Step{
name: :tokenization,
work: &TextProcessing.tokenize/1,
steps: %{ # steps here will be enqueued to run upon `:tokenization`'s completion
down_case: %Step{
name: :down_case,
work: &TextProcessing.downcase/1,
steps: %{
word_counts: %Step{
name: :word_counts,
work: &TextProcessing.word_counts/1,
},
basic_sentiment: %Step{
name: :basic_sentiment,
work: &SentimentAnalysis.basic_sentiment/1,
}
}
},
total_count_of_words: %Step{
name: :total_count_of_words,
work: &TextProcessing.count_of_words/1
},
}
},
# Root steps can run concurrently from other steps.
google_nlp: %Step{
name: :google_nlp,
work: &GoogleNLP.analyze_text/1,
},
dalle_challe_readability: %Step{
name: :dalle_challe_readability,
work: &Readbility.dalle_chall_readability/1,
},
}
```
TODO:
- [x] Implement `run_id` to keep track of active steps; this will let us handle retries and prune deactivated steps from session terminations
- [ ] Step retry counts?
* Maybe context is an anti-pattern?
"""
defstruct name: nil,
run_id: nil,
work: nil,
steps: nil,
input: nil,
result: nil,
runner: nil,
runnable?: false,
context: %{}
@type t :: %__MODULE__{
name: binary(),
run_id: term(),
work: function(),
steps: map(),
input: term(),
result: term(),
runner: module(),
runnable?: boolean(),
context: map(),
}
def new(params) do
struct!(__MODULE__, params)
end
def can_run?(%__MODULE__{runnable?: true}), do: :ok
def can_run?(%__MODULE__{}), do: :error
def set_queue(%__MODULE__{} = step, runner),
do: set_step_value(step, :runner, runner)
def set_work(%__MODULE__{} = step, work)
when is_function(work), do: set_step_value(step, :work, work)
def set_result(%__MODULE__{} = step, result),
do: set_step_value(step, :result, result)
def set_input(%__MODULE__{} = step, input),
do: set_step_value(step, :input, input)
def assign_run_id(%__MODULE__{} = step),
do: set_step_value(step, :run_id, UUID.uuid4())
def add_context(%__MODULE__{context: existing_context} = step, key, context) do
%__MODULE__{step | context: Map.put(existing_context, key, context)}
end
defp set_step_value(%__MODULE__{} = step, key, value)
when key in [:work, :input, :result, :runner, :run_id] do
Map.put(step, key, value) |> evaluate_runnability()
end
@doc """
Checks if a given step is runnable.
This is chained automatically within `set_queue/2`, `set_result/2`, `set_input/2`, and `new/1`.
`can_run?/1` can be used at run time to ensure a step is runnable before allocating resources and executing side effects.
"""
def evaluate_runnability(%__MODULE__{
name: name,
work: work,
input: input,
result: nil,
runner: runner,
run_id: run_id,
} = step)
when is_function(work, 1)
and not is_nil(name)
and not is_nil(input)
and not is_nil(runner)
and not is_nil(run_id)
do
%__MODULE__{step | runnable?: true}
end
def evaluate_runnability(%__MODULE__{} = step), do:
%__MODULE__{step | runnable?: false}
@doc """
Adds a child step to be enqueued with the result of the previous upon running.
### Usage
```elixir
parent_step = %Step{name: "parent step"}
child_step = %Step{name: "child step"}
parent_with_child = Step.add_step(parent_step, child_step)
```
"""
def add_step(%__MODULE__{steps: nil} = parent_step, child_step) do
add_step(%__MODULE__{parent_step | steps: %{}}, child_step)
end
def add_step(
%__MODULE__{steps: %{} = steps} = parent_step,
%__MODULE__{name: name} = child_step)
do
%__MODULE__{parent_step | steps: Map.put_new(steps, name, child_step)}
end
@doc """
Assuming a runnable step, `run/1` executes the function contained in `work`,
sets the `result` with the return and enqueues dependent steps with the result as the input for the children.
"""
def run(%__MODULE__{runnable?: false}), do: {:error, "step not runnable"}
def run(%__MODULE__{work: work, input: input} = step) # consider mfa
when is_function(work) do
with {:ok, result} <- work.(input) do # we're assuming that the work function follows {:ok, _} | {:error, _} conventions - better way?
updated_step =
step
|> set_result(result)
|> set_parent_as_result_for_children()
|> assign_run_id_for_children()
|> dispatch_to_runners() # this is runner specific
{:ok, updated_step}
else
error -> error
end
end
def set_parent_as_result_for_children(%__MODULE__{steps: nil} = parent_step), do: parent_step
def set_parent_as_result_for_children(%__MODULE__{result: nil} = parent_step), do: parent_step
def set_parent_as_result_for_children(%__MODULE__{steps: steps} = parent_step) do
# Map.new/2 keeps :steps a map, matching the struct's map() type (Enum.map/2 would return a list)
child_steps = Map.new(steps, fn {name, step} ->
{name, set_input(step, parent_step)}
end)
%__MODULE__{parent_step | steps: child_steps}
end
def assign_run_id_for_children(%__MODULE__{steps: nil} = parent_step), do: parent_step
def assign_run_id_for_children(%__MODULE__{steps: steps} = parent_step) do
child_steps = Map.new(steps, fn {name, step} ->
{name, assign_run_id(step)}
end)
%__MODULE__{parent_step | steps: child_steps}
end
def dispatch_to_runners(%__MODULE__{steps: nil} = step),
do: evaluate_runnability(step)
def dispatch_to_runners(%__MODULE__{steps: steps, runner: runner} = step) do
Enum.each(steps, fn {_name, step} -> runner.run(step) end)
step
end
end
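# Illustrative run sketch (MyApp.InlineRunner is a hypothetical runner module
# exposing run/1; any module that eventually calls Dagger.Step.run/1 works):
#
#     step =
#       Dagger.Step.new(name: :tokenize, work: &{:ok, String.split(&1)})
#       |> Dagger.Step.set_queue(MyApp.InlineRunner)
#       |> Dagger.Step.set_input("hello world")
#       |> Dagger.Step.assign_run_id()
#
#     {:ok, ran} = Dagger.Step.run(step)
#     ran.result
#     # => ["hello", "world"]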
# File: lib/step.ex
defmodule Faker.Food.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for food data in English.
"""
@doc """
Returns a dish.
## Examples
iex> Faker.Food.En.dish()
"Vegetable Soup"
iex> Faker.Food.En.dish()
"Fish and chips"
iex> Faker.Food.En.dish()
"Pork belly buns"
iex> Faker.Food.En.dish()
"Pasta Carbonara"
"""
@spec dish() :: String.t()
sampler(:dish, [
"Barbecue Ribs",
"Bruschette with Tomato",
"Caesar Salad",
"Califlower penne",
"California maki",
"Caprese Salad",
"Cheeseburger",
"Chicken Fajitas",
"Chicken milanese",
"Chicken wings",
"Chilli con carne",
"Ebiten maki",
"Fettuccine Alfredo",
"Fish and chips",
"French fries with sausages",
"Katsu Curry",
"Lasagne",
"Linguine with clams",
"Meatballs with sauce",
"Mushroom Risotto",
"Pappardelle alla Bolognese",
"Pasta Carbonara",
"Pasta and Beans",
"Pasta with Tomato and Basil",
"Philadelphia maki",
"Pizza",
"Pork belly buns",
"Pork sausage roll",
"Ricotta stuffed Ravioli",
"Risotto with seafood",
"Salmon nigiri",
"Scotch eggs",
"Souvlaki",
"Teriyaki Chicken donburi",
"TiramisΓΉ",
"Tuna sashimi",
"Vegetable Soup"
])
@doc """
Returns a description.
## Examples
iex> Faker.Food.En.description()
"Two buttermilk waffles, topped with whipped cream and maple syrup, a side of two eggs served any style, and your choice of smoked bacon or smoked ham."
iex> Faker.Food.En.description()
"28-day aged 300g USDA Certified Prime Ribeye, rosemary-thyme garlic butter, with choice of two sides."
iex> Faker.Food.En.description()
"Breaded fried chicken with waffles, and a side of maple syrup."
iex> Faker.Food.En.description()
"Creamy mascarpone cheese and custard layered between espresso and rum soaked house-made ladyfingers, topped with Valrhona cocoa powder."
"""
@spec description() :: String.t()
sampler(:description, [
"Three eggs with cilantro, tomatoes, onions, avocados and melted Emmental cheese. With a side of roasted potatoes, and your choice of toast or croissant.",
"Three egg omelet with Roquefort cheese, chives, and ham. With a side of roasted potatoes, and your choice of toast or croissant.",
"Three egg whites with spinach, mushrooms, caramelized onions, tomatoes and low-fat feta cheese. With herbed quinoa, and your choice of rye or whole-grain toast.",
"Smoked salmon, poached eggs, diced red onions and Hollandaise sauce on an English muffin. With a side of roasted potatoes.",
"Fresh parsley, Italian sausage, shallots, garlic, sun-dried tomatoes and mozzarella cheese in an all-butter crust. With a side of mixed fruits.",
"Thick slices of French toast bread, brown sugar, half-and-half and vanilla, topped with powdered sugar. With two eggs served any style, and your choice of smoked bacon or smoked ham.",
"Two buttermilk waffles, topped with whipped cream and maple syrup, a side of two eggs served any style, and your choice of smoked bacon or smoked ham.",
"Breaded fried chicken with waffles, and a side of maple syrup.",
"Two butter croissants of your choice (plain, almond or cheese). With a side of herb butter or house-made hazelnut spread.",
"28-day aged 300g USDA Certified Prime Ribeye, rosemary-thyme garlic butter, with choice of two sides.",
"Breaded fried chicken with waffles. Served with maple syrup.",
"Fresh Norwegian salmon, lightly brushed with our herbed Dijon mustard sauce, with choice of two sides.",
"Creamy mascarpone cheese and custard layered between espresso and rum soaked house-made ladyfingers, topped with Valrhona cocoa powder.",
"Granny Smith apples mixed with brown sugar and butter filling, in a flaky all-butter crust, with ice cream."
])
@doc """
Returns an ingredient.
## Examples
iex> Faker.Food.En.ingredient()
"Tomatoes"
iex> Faker.Food.En.ingredient()
"Albacore Tuna"
iex> Faker.Food.En.ingredient()
"Potatoes"
iex> Faker.Food.En.ingredient()
"Tinned"
"""
@spec ingredient() :: String.t()
sampler(:ingredient, [
"Achacha",
"Adzuki Beans",
"Agar",
"Agave Syrup",
"Ajowan Seed",
"Albacore Tuna",
"Alfalfa",
"Allspice",
"Almond oil",
"Almonds",
"Amaranth",
"Amchur",
"Anchovies",
"Anchovies",
"Aniseed",
"Annatto seed",
"Apple Cider Vinegar",
"Apple juice",
"Apple Juice Concentrate",
"Apples",
"Bonza",
"Apples",
"Apricots",
"Arborio rice",
"Arrowroot",
"Artichoke",
"Arugula",
"Asafoetida",
"Asian Greens",
"Asian Noodles",
"Asparagus",
"Aubergine",
"Avocado",
"Avocado Oil",
"Avocado Spread",
"Bacon",
"Baking Powder",
"Baking Soda",
"Balsamic Vinegar",
"Bamboo Shoots",
"Banana",
"Barberry",
"Barley",
"Barramundi",
"Basil Basmati rice",
"Bay Leaves",
"Bean Shoots",
"Bean Sprouts",
"Beans",
"Green beans",
"Beef",
"Beetroot",
"Berries",
"Black Eyed Beans",
"Blackberries",
"Blood oranges",
"Blue Cheese",
"Blue Eye Trevalla",
"Blue Swimmer Crab",
"Blueberries",
"Bocconcini",
"Bok Choy",
"Bonito Flakes",
"Borlotti Beans",
"Brazil Nut",
"Bran",
"Bread",
"RyeBread",
"Sour Dough Bread",
"SpeltBread",
"WhiteBread",
"Wholegrain Bread",
"Wholemeal",
"Brie",
"Broccoli",
"Broccolini",
"Brown Rice",
"Brown rice vinegar",
"Brussels Sprouts",
"Buckwheat",
"Buckwheat Noodles",
"BulghurBurghul",
"Bush Tomato",
"Butter",
"Butter Beans",
"Buttermilk",
"Butternut lettuce",
"Butternut pumpkin",
"Cabbage",
"Cacao",
"Cake",
"Calamari",
"Camellia Tea Oil",
"Camembert",
"Camomile",
"Candle Nut",
"Cannellini Beans",
"Canola Oil",
"Cantaloupe",
"Capers",
"Capsicum",
"Starfruit",
"Caraway Seed",
"Cardamom",
"CarobCarrot",
"Carrot",
"Cashews",
"Cassia bark",
"Cauliflower",
"Cavalo",
"Cayenne",
"Celery",
"Celery Seed",
"Cheddar",
"Cherries",
"Cherries",
"Chestnut",
"Chestnut",
"Chia seeds",
"Chicken",
"Chickory",
"Chickpea",
"Chilli Pepper",
"FreshChillies",
"dried Chinese Broccoli",
"Chinese Cabbage",
"Chinese Five Spice",
"Chives",
"Dark Chocolate",
"MilkChocolate",
"Choy Sum",
"Cinnamon",
"Clams",
"Cloves",
"Cocoa powder",
"Coconut",
"Coconut Oil",
"Coconut water",
"Coffee",
"Corella Pear",
"Coriander Leaves",
"Coriander Seed",
"Corn Oil",
"Corn Syrup",
"Corn Tortilla",
"Cornichons",
"Cornmeal",
"Cos lettuce",
"Cottage Cheese",
"Cous Cous",
"Crabs",
"Cranberry",
"Cream",
"Cream Cheese",
"Cucumber",
"Cumin",
"Cumquat",
"Currants",
"Curry Leaves",
"Curry Powder",
"Custard Apples",
"Custard ApplesDaikon",
"Dandelion",
"Dashi",
"Dates",
"Dill",
"Dragonfruit",
"Dried Apricots",
"Duck",
"Edam",
"Edamame",
"Eggplant",
"Eggs",
"Elderberry",
"Endive",
"English Spinach",
"Extra Virgin Olive Oil",
"Farmed Prawns",
"Feijoa",
"Fennel",
"Fennel Seeds",
"Fenugreek",
"Feta",
"Figs",
"File Powder",
"Fingerlime",
"Fish Sauce",
"Flathead",
"Flaxseed",
"Flaxseed Oil",
"Flounder",
"Flour",
"Besan",
"Buckwheat Flour",
"FlourOat",
"FlourPotato",
"FlourRice",
"Brown Flour",
"WhiteFlour",
"SoyFlour",
"Tapioca Flour",
"UnbleachedFlour",
"Wholewheat flour",
"Freekeh",
"French eschallots",
"Fromage Blanc",
"Fruit",
"Galangal",
"Garam Masala",
"Garlic",
"Garlic",
"Chives",
"GemGinger",
"Goat Cheese",
"Goat Milk",
"Goji Berry",
"Grape Seed Oil",
"Grapefruit",
"Grapes",
"Green Chicken Curry",
"Green Pepper",
"Green Tea",
"Green Tea noodles",
"Greenwheat Freekeh",
"Gruyere",
"Guava",
"Gula MelakaHaloumiHam",
"Haricot Beans",
"Harissa",
"Hazelnut",
"Hijiki",
"Hiramasa Kingfish",
"Hokkien Noodles",
"Honey",
"Honeydew melon",
"Horseradish",
"Hot smoked salmon",
"Hummus",
"Iceberg lettuce",
"Incaberries",
"Jarrahdale pumpkin",
"Jasmine rice",
"Jelly",
"<NAME>",
"Jewfish",
"Jicama",
"<NAME>",
"Lime Leaves",
"Kale",
"Kangaroo",
"Kecap Manis",
"Kenchur",
"Kidney Beans",
"Kidneys",
"Kiwi Fruit",
"Kiwiberries",
"Kohlrabi",
"Kokam",
"Kombu",
"Koshihikari rice",
"Kudzu",
"Kumera",
"Lamb",
"Lavender Flowers",
"Leeks",
"Lemon",
"Lemongrass",
"Lentils",
"Lettuce",
"Licorice",
"Limes",
"Liver",
"Lobster",
"Longan",
"Loquats",
"Lotus Root",
"Lychees",
"Lychees",
"Macadamia Nut",
"Macadamia oil",
"Mace",
"Mackerel",
"Mackerel",
"Tinned",
"Mahi mahi",
"Mahlab",
"Malt vinegar",
"Mandarins",
"Mango",
"Mangosteens",
"Maple Syrup",
"Margarine",
"Marigold",
"Marjoram",
"Mastic",
"Melon",
"Milk",
"Mint",
"Miso",
"Molasses",
"Monkfish",
"Morwong",
"Mountain Bread",
"Mozzarella",
"Muesli",
"Mulberries",
"Mullet",
"Mung Beans",
"Flat Mushrooms",
"Brown Mushrooms",
"Common Cultivated Mushrooms",
"Enoki Mushrooms",
"Oyster Mushrooms",
"Shiitake Mushrooms",
"Mussels",
"Mustard",
"Mustard Seed",
"Nashi Pear",
"Nasturtium",
"Nectarines",
"Nori",
"Nutmeg",
"Nutritional Yeast",
"Nuts",
"Oatmeal",
"Oats",
"Octopus",
"Okra",
"Olive Oil",
"Olives",
"Omega Spread",
"Onion",
"Oranges",
"Oregano",
"Oyster Sauce",
"Oysters",
"Pear",
"Pandanus leaves",
"Papaw",
"Papaya",
"Paprik",
"Parmesan cheese",
"Parrotfish",
"Parsley",
"Parsnip",
"Passionfruit",
"Pasta",
"Peaches",
"Peanuts",
"Pear Juice",
"Pears",
"Peas",
"Pecan Nut",
"Pecorino",
"PepitasPepper",
"Szechuan Pepperberry",
"Peppercorns",
"Peppermint",
"Peppers",
"Persimmon",
"Pine Nut",
"Pineapple",
"Pinto Beans",
"Pistachio Nut",
"Plums",
"Polenta",
"Pomegranate",
"Poppy Seed",
"Porcini mushrooms",
"Pork",
"Potatoes",
"Provolone",
"Prunes",
"Pumpkin",
"Pumpkin Seed",
"Purple carrot",
"Purple RiceQuail",
"Quark Quinc",
"Quinoa",
"Radicchio",
"Radish",
"Raisin",
"Raspberry",
"Red cabbage",
"Red Lentils",
"Red Pepper",
"Red Wine Vinegar",
"Redfish",
"Rhubarb",
"Rice Noodles",
"Rice paper",
"Rice Syrup",
"Ricemilk",
"Ricotta",
"Rockmelon",
"Rose Water",
"Rosemary",
"Rye",
"Safflower Oil",
"Saffron",
"Sage",
"Sake",
"Salmon",
"Sardines",
"Sausages",
"Scallops",
"Sea Salt",
"Semolina",
"Sesame Oil",
"Sesame seed",
"Sesame Seeds",
"Shark",
"Silverbeet",
"Slivered Almonds",
"Smoked Trout",
"Snapper",
"Snowpea sprouts",
"Snowpeas",
"Soba",
"Soy Beans",
"Soy Milk",
"Soy Sauce",
"Soy",
"Sprouts",
"Soymilk",
"Spearmint",
"Spelt",
"Spinach",
"Spring Onions",
"Squash",
"Squid",
"Star Anise",
"Star Fruit",
"Stevia",
"Beef Stock",
"Chicken Stock",
"Fish Stock",
"Vegetable Stock",
"Strawberries",
"Sugar",
"Sultanas",
"Sun dried tomatoes",
"Sunflower Oil",
"Sunflower Seeds",
"SwedeSweet Chilli Sauce",
"Sweet Potato",
"Swiss Chard",
"SwordfishTabasco",
"Tahini",
"Taleggio cheese",
"Tamari",
"Tamarillo",
"Tangelo",
"Tapioca",
"Tarragon",
"Tea",
"Tea Oil",
"Tempeh",
"ThymeTofu",
"Tom Yum",
"Tomatoes",
"Trout",
"Tuna",
"Turkey",
"Turmeric",
"Turnips",
"Vanilla Beans",
"Vegetable Oil",
"Vegetable spaghetti",
"Vermicelli Noodles",
"Vinegar",
"Wakame",
"Walnut",
"Warehou",
"Wasabi",
"Water",
"Watercress",
"Watermelon",
"Wattleseed",
"Wheat",
"Wheatgrass juice",
"White rice",
"White wine vinegar",
"Whiting Wild Rice",
"William Pear",
"RedWine",
"White Wine",
"Yeast",
"Yellow Papaw",
"Yellowtail Kingfish",
"Yoghurt",
"Yogurt",
"Zucchini"
])
@doc """
Returns a type of measurement.
## Examples
iex> Faker.Food.En.measurement()
"teaspoon"
iex> Faker.Food.En.measurement()
"gallon"
iex> Faker.Food.En.measurement()
"pint"
iex> Faker.Food.En.measurement()
"cup"
"""
@spec measurement() :: String.t()
sampler(:measurement, ["teaspoon", "tablespoon", "cup", "pint", "quart", "gallon"])
@doc """
Returns a measurement size.
## Examples
iex> Faker.Food.En.measurement_size()
"1/4"
iex> Faker.Food.En.measurement_size()
"3"
iex> Faker.Food.En.measurement_size()
"1"
iex> Faker.Food.En.measurement_size()
"1/2"
"""
@spec measurement_size() :: String.t()
sampler(:measurement_size, ["1/4", "1/3", "1/2", "1", "2", "3"])
@doc """
Returns a metric measurement.
## Examples
iex> Faker.Food.En.metric_measurement()
"centiliter"
iex> Faker.Food.En.metric_measurement()
"deciliter"
iex> Faker.Food.En.metric_measurement()
"liter"
iex> Faker.Food.En.metric_measurement()
"milliliter"
"""
@spec metric_measurement() :: String.t()
sampler(:metric_measurement, ["milliliter", "deciliter", "centiliter", "liter"])
@doc """
Returns a spice or seasoning.
## Examples
iex> Faker.Food.En.spice()
"Garlic Salt"
iex> Faker.Food.En.spice()
"Ras-el-Hanout"
iex> Faker.Food.En.spice()
"Curry Hot"
iex> Faker.Food.En.spice()
"Peppercorns Mixed"
"""
@spec spice() :: String.t()
sampler(:spice, [
"Achiote Seed",
"Ajwain Seed",
"Ajwan Seed",
"Allspice Ground",
"Allspice Whole",
"Amchoor",
"Anise",
"Anise Star",
"Aniseed Whole",
"Annatto Seed",
"Arrowroot",
"Asafoetida",
"Baharat",
"Balti Masala",
"Balti Stir Fry Mix",
"Basil",
"Bay Leaves",
"Bay Leaves Chopped",
"BBQ Seasoning",
"Biryani Spice Mix",
"Cajun Seasoning",
"Caraway Seed",
"Cardamom Ground",
"Cardamom Whole",
"Cassia",
"Cassia Bark",
"Cayenne Pepper",
"Celery Leaf",
"Celery Salt",
"Celery Seed",
"Chamomile",
"Chervil",
"Chicken Seasoning",
"Chilli Crushed",
"Chilli Ground",
"Chilli Pepper",
"Chillies Whole",
"China Star",
"Chinese 5 Spice",
"Chives",
"Cinnamon Bark",
"Cinnamon Ground",
"Cinnamon Powder",
"Cinnamon Sticks",
"Cloves Ground",
"Cloves Whole",
"Colombo Powder",
"Coriander Ground",
"Coriander Leaf",
"Coriander Seed",
"Creole Seasoning",
"Cumin Ground",
"Cumin Seed",
"Cumin Seed Black",
"Cumin Seed Royal",
"Curly Leaf Parsley",
"Curry Chinese",
"Curry Hot",
"Curry Leaves",
"Curry Madras Medium",
"Curry Mild",
"Curry Thai Green",
"Curry Thai Red",
"Dhansak Spice Mix",
"Dill Herb",
"Dill Leaf",
"Dill Seed",
"Fajita Seasoning",
"Fennel Seed",
"Fenugreek Ground",
"Fenugreek Leaf",
"Fenugreek Seed",
"Fines Herbes",
"Fish Seasoning",
"Five Spice Mix",
"French Lavender",
"Galangal Ground",
"Garam Masala",
"Garlic Chips",
"Garlic Granules",
"Garlic Powder",
"Garlic Salt",
"German Chamomile",
"Ginger Root",
"Ginger Ground",
"Green Cardamom",
"Herbes de Provence",
"Jalfrezi Curry Powder",
"Jalfrezi Mix",
"Jerk Seasoning",
"Juniper Berries",
"Kaffir Leaves",
"Korma Curry Powder",
"Korma Mix",
"Lamb Seasoning",
"Lavender",
"Lemon Grass",
"Lemon Grass Chopped",
"Lemon Pepper",
"Lime Leaves",
"Lime Leaves Ground",
"Liquorice Root",
"Mace Ground",
"Mace Whole",
"Mango Powder",
"Marjoram",
"Methi",
"Methi Leaves",
"Mexican Salsa Mix",
"Mint",
"Mixed Herbs",
"Mixed Spice",
"Mulled Cider Spices",
"Mulled Wine Spices",
"Mustard Powder",
"Mustard Seed Black",
"Mustard Seed Brown",
"Mustard Seed White",
"Mustard Seed Yellow",
"Nigella",
"Nutmeg Ground",
"Nutmeg Whole",
"Onion Seed",
"Orange Zest",
"Oregano",
"Paella Seasoning",
"Paprika",
"Paprika Hungarian",
"Paprika Smoked",
"Parsley",
"Parsley Flat Leaf",
"Pepper Black Coarse",
"Pepper Black Ground",
"Pepper White Ground",
"Peppercorns Black",
"Peppercorns Cracked Black",
"Peppercorns Green",
"Peppercorns Mixed",
"Peppercorns Pink",
"Peppercorns Szechwan",
"Peppercorns White",
"Pickling Spice",
"Pimento Berries",
"Pimento Ground",
"Piri Piri Seasoning",
"Pizza Topping Mix",
"Poppy Seed",
"Pot Marjoram",
"P<NAME> Colombo",
"Ras-el-Hanout",
"Rice Paper",
"Rogan Josh Curry Powder",
"Rogan Josh Mix",
"<NAME>",
"Rosemary",
"Saffron",
"Sage",
"Sea Salt Coarse",
"Seasoning Salt",
"Self Adhesive Spice Labels",
"Sesame Seed",
"Spearmint",
"Spice Charts",
"Steak Seasoning",
"Sumac Ground",
"Sweet Basil",
"Sweet Laurel",
"Tagine Seasoning",
"Tandoori Masala",
"Tandoori Mix",
"Tarragon",
"Thai Creen Curry Mix",
"Thai Red Curry Mix",
"Thai Stir Fry",
"Thyme",
"Tikka Masala",
"Tikka Masala Curry Powder",
"Turmeric",
"Turmeric Powder",
"Vanilla Bean",
"Vanilla Pods",
"Vegetable Seasoning",
"Zahtar Spice Mix"
])
end
|
lib/faker/food/en.ex
| 0.693058
| 0.509764
|
en.ex
|
starcoder
|
defmodule Aoc.Year2018.Day02 do
@moduledoc """
Solution to Day 02 of 2018: Inventory Management System
## --- Day 2: Inventory Management System ---
You stop falling through time, catch your breath, and check the screen on the
device. "Destination reached. Current Year: 1518. Current Location: North Pole
Utility Closet 83N10." You made it! Now, to find those anomalies.
Outside the utility closet, you hear footsteps and a voice. "...I'm not sure
either. But now that so many people have chimneys, maybe he could sneak in that
way?" Another voice responds, "Actually, we've been working on a new kind of
*suit* that would let him fit through tight spaces like that. But, I heard that
a few days ago, they lost the prototype fabric, the design plans, everything!
Nobody on the team can even seem to remember important details of the project!"
"Wouldn't they have had enough fabric to fill several boxes in the warehouse?
They'd be stored together, so the box IDs should be similar. Too bad it would
take forever to search the warehouse for *two similar box IDs*..." They walk too
far away to hear any more.
Late at night, you sneak to the warehouse - who knows what kinds of paradoxes
you could cause if you were discovered - and use your fancy wrist device to
quickly scan every box and produce a list of the likely candidates (your puzzle
input).
To make sure you didn't miss any, you scan the likely candidate boxes again,
counting the number that have an ID containing *exactly two of any letter* and
then separately counting those with *exactly three of any letter*. You can
multiply those two counts together to get a rudimentary checksum and compare it
to what your device predicts.
For example, if you see the following box IDs:
- `abcdef` contains no letters that appear exactly two or three times.
- `bababc` contains two `a` and three `b`, so it counts for both.
- `abbcde` contains two `b`, but no letter appears exactly three times.
- `abcccd` contains three `c`, but no letter appears exactly two times.
- `aabcdd` contains two `a` and two `d`, but it only counts once.
- `abcdee` contains two `e`.
- `ababab` contains three `a` and three `b`, but it only counts once.
Of these box IDs, four of them contain a letter which appears exactly twice, and
three of them contain a letter which appears exactly three times. Multiplying
these together produces a checksum of `4 * 3 = 12`.
*What is the checksum* for your list of box IDs?
## --- Part Two ---
Confident that your list of box IDs is complete, you're ready to find the boxes
full of prototype fabric.
The boxes will have IDs which differ by exactly one character at the same
position in both strings. For example, given the following box IDs:
`abcde, fghij, klmno, pqrst, fguij, axcye, wvxyz`
The IDs `abcde` and `axcye` are close, but they differ by two characters (the
second and fourth). However, the IDs `fghij` and `fguij` differ by exactly one
character, the third (`h` and `u`). Those must be the correct boxes.
*What letters are common between the two correct box IDs?* (In the example
above, this is found by removing the differing character from either ID,
producing `fgij`.)
"""
@doc """
"""
def part_1(input) do
input
|> String.split()
|> Enum.reduce({0, 0}, &find_twice_thrice/2)
|> (fn {x, y} -> x * y end).()
end
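# For each box ID, `find_twice_thrice/2` builds a character frequency map and
# contributes at most 1 to the "twice" count and 1 to the "thrice" count;
# those per-ID counts are accumulated across all IDs.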
defp find_twice_thrice(string, {old_twice, old_thrice}) do
string
|> String.to_charlist()
|> Enum.reduce(%{}, fn char, acc -> Map.update(acc, char, 1, &(&1 + 1)) end)
|> Enum.reduce({0, 0}, fn
{_codepoint, 2}, {twice, thrice} when twice == 0 -> {1, thrice}
{_codepoint, 3}, {twice, thrice} when thrice == 0 -> {twice, 1}
{_codepoint, _}, {twice, thrice} -> {twice, thrice}
end)
|> (fn {twice, thrice} -> {old_twice + twice, old_thrice + thrice} end).()
end
@doc """
"""
def part_2(input) do
input
|> String.split()
|> Enum.map(&String.to_charlist/1)
|> string_when_different_by_one()
end
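# Compares the head ID against each remaining ID. `one_char_difference_string/4`
# walks both charlists in lock-step, collecting matching characters and counting
# mismatches, and only yields the common characters when exactly one position differs.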
defp string_when_different_by_one([h | t]) do
Enum.find_value(t, &one_char_difference_string(&1, h, [], 0)) ||
string_when_different_by_one(t)
end
defp one_char_difference_string([h | t1], [h | t2], same_list, difference_count) do
one_char_difference_string(t1, t2, [h | same_list], difference_count)
end
defp one_char_difference_string([_ | t1], [_ | t2], same_list, difference_count) do
one_char_difference_string(t1, t2, same_list, difference_count + 1)
end
defp one_char_difference_string([], [], same_list, 1) do
same_list
|> Enum.reverse()
|> List.to_string()
end
defp one_char_difference_string([], [], _, _), do: nil
end
|
lib/aoc/year_2018/day_02.ex
| 0.735737
| 0.682917
|
day_02.ex
|
starcoder
|
defmodule Elsa.Group.Manager do
@moduledoc """
Defines the GenServer process that coordinates assignment
of workers to topics/partitions of a given consumer group.
Tracks consumer group state and reinstantiates workers to
the last unacknowledged message in the event of failure.
"""
use GenServer, shutdown: 5 * 60_000
require Logger
import Record, only: [defrecord: 2, extract: 2]
import Elsa.Supervisor, only: [registry: 1]
alias Elsa.Group.Manager.WorkerManager
defrecord :brod_received_assignment, extract(:brod_received_assignment, from_lib: "brod/include/brod.hrl")
@behaviour :brod_group_member
@type group :: String.t()
@type generation_id :: pos_integer()
@typedoc "Module that implements the Elsa.Consumer.MessageHandler behaviour"
@type handler :: module()
@typedoc "Function called for each new assignment"
@type assignment_received_handler ::
(group(), Elsa.topic(), Elsa.partition(), generation_id() -> :ok | {:error, term()})
@typedoc "Function called for when assignments have been revoked"
@type assignments_revoked_handler :: (() -> :ok)
@typedoc "Minimum bytes to fetch in batch of messages: default = 0"
@type min_bytes :: non_neg_integer()
@typedoc "Maximum bytes to fetch in batch of messages: default = 1MB"
@type max_bytes :: non_neg_integer()
@typedoc "Max number of milliseconds to wait to wait for broker to collect min_bytes of messages: default = 10_000 ms"
@type max_wait_time :: non_neg_integer()
@typedoc "Allow consumer process to sleep this amount of ms if kafka replied with 'empty' messages: default = 1_000 ms"
@type sleep_timeout :: non_neg_integer()
@typedoc "The windows size (number of messages) allowed to fetch-ahead: default = 10"
@type prefetch_count :: non_neg_integer()
@typedoc "The total number of bytes allowed to fetch-ahead: default = 100KB"
@type prefetch_bytes :: non_neg_integer()
@typedoc "The offset from wthich to begin fetch requests: default = latest"
@type begin_offset :: non_neg_integer()
@typedoc "How to reset begin_offset if OffsetOutOfRange exception is received"
@type offset_reset_policy :: :reset_to_earliest | :reset_to_latest
@typedoc "Values to configure the consumer, all are optional"
@type consumer_config :: [
min_bytes: min_bytes(),
max_bytes: max_bytes(),
max_wait_time: max_wait_time(),
sleep_timeout: sleep_timeout(),
prefetch_count: prefetch_count(),
prefetch_bytes: prefetch_bytes(),
begin_offset: begin_offset(),
offset_reset_policy: offset_reset_policy()
]
@typedoc "keyword list of config values to start elsa consumer"
@type init_opts :: [
connection: Elsa.connection(),
endpoints: Elsa.endpoints(),
group: group(),
topics: [Elsa.topic()],
assignment_received_handler: assignment_received_handler(),
assignments_revoked_handler: assignments_revoked_handler(),
handler: handler(),
handler_init_args: term(),
config: consumer_config()
]
@default_delay 5_000
defmodule State do
@moduledoc """
The running state of the consumer group manager process.
"""
defstruct [
:connection,
:group,
:topics,
:config,
:supervisor_pid,
:group_coordinator_pid,
:acknowledger_pid,
:assignment_received_handler,
:assignments_revoked_handler,
:start_time,
:delay,
:handler,
:handler_init_args,
:workers,
:generation_id
]
end
@doc """
Provides convenience for backward compatibility with previous versions of Elsa where acking for
a consumer group was handled by the Elsa.Group.Manager module.
"""
defdelegate ack(connection, topic, partition, generation_id, offset), to: Elsa.Group.Acknowledger
def get_committed_offsets(_pid, _topic) do
{:ok, []}
end
@doc """
Trigger the assignment of workers to a given topic and partition
"""
@spec assignments_received(GenServer.server(), term(), generation_id(), [tuple()]) :: :ok
def assignments_received(pid, group_member_id, generation_id, assignments) do
GenServer.call(pid, {:process_assignments, group_member_id, generation_id, assignments})
end
@doc """
Trigger deallocation of all workers from the consumer group and stop
worker processes.
"""
@spec assignments_revoked(GenServer.server()) :: :ok
def assignments_revoked(pid) do
GenServer.call(pid, :revoke_assignments, 30_000)
end
@doc """
Start the group manager process and register a name with the process registry.
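A minimal, illustrative call (option values are placeholders; `supervisor_pid`
is normally supplied by Elsa's supervision tree):
Elsa.Group.Manager.start_link(
connection: :my_conn,
group: "my_group",
topics: ["my_topic"],
handler: MyMessageHandler,
supervisor_pid: self()
)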
"""
@spec start_link(init_opts) :: GenServer.on_start()
def start_link(opts) do
connection = Keyword.fetch!(opts, :connection)
GenServer.start_link(__MODULE__, opts, name: {:via, Elsa.Registry, {registry(connection), __MODULE__}})
end
def init(opts) do
Process.flag(:trap_exit, true)
state = %State{
group: Keyword.fetch!(opts, :group),
connection: Keyword.fetch!(opts, :connection),
topics: Keyword.fetch!(opts, :topics),
supervisor_pid: Keyword.fetch!(opts, :supervisor_pid),
assignment_received_handler: Keyword.get(opts, :assignment_received_handler, fn _g, _t, _p, _gen -> :ok end),
assignments_revoked_handler: Keyword.get(opts, :assignments_revoked_handler, fn -> :ok end),
start_time: :erlang.system_time(:milli_seconds),
delay: Keyword.get(opts, :delay, @default_delay),
handler: Keyword.fetch!(opts, :handler),
handler_init_args: Keyword.get(opts, :handler_init_args, %{}),
config: Keyword.get(opts, :config, []),
workers: %{}
}
{:ok, state, {:continue, :initialize}}
end
def handle_continue(:initialize, state) do
with {:ok, group_coordinator_pid} <- start_group_coordinator(state),
{:ok, acknowledger_pid} <- start_acknowledger(state) do
{:noreply, %{state | group_coordinator_pid: group_coordinator_pid, acknowledger_pid: acknowledger_pid}}
else
{:error, reason} ->
{:stop, reason, state}
end
end
def handle_call({:process_assignments, _member_id, generation_id, assignments}, _from, state) do
Logger.debug(fn -> "#{__MODULE__}: process assignments #{inspect(assignments)}" end)
case call_lifecycle_assignment_received(state, assignments, generation_id) do
{:error, reason} ->
{:stop, reason, {:error, reason}, state}
:ok ->
Elsa.Group.Acknowledger.update_generation_id(
{:via, Elsa.Registry, {registry(state.connection), Elsa.Group.Acknowledger}},
generation_id
)
new_workers = start_workers(state, generation_id, assignments)
{:reply, :ok, %{state | workers: new_workers, generation_id: generation_id}}
end
end
def handle_call(:revoke_assignments, _from, state) do
Logger.info("Assignments revoked for group #{state.group}")
new_workers = WorkerManager.stop_all_workers(state.connection, state.workers)
:ok = apply(state.assignments_revoked_handler, [])
{:reply, :ok, %{state | workers: new_workers, generation_id: nil}}
end
def handle_info({:DOWN, ref, :process, object, reason}, state) do
Logger.debug(fn -> "#{__MODULE__}: worker death: #{inspect(object)} - #{inspect(reason)}" end)
new_workers = WorkerManager.restart_worker(state.workers, ref, state)
{:noreply, %{state | workers: new_workers}}
end
def handle_info({:EXIT, _pid, reason}, %State{delay: delay, start_time: started} = state) do
lifetime = :erlang.system_time(:milli_seconds) - started
max(delay - lifetime, 0)
|> Process.sleep()
{:stop, reason, state}
end
def terminate(reason, state) do
Logger.debug(fn -> "#{__MODULE__} : Terminating #{state.connection}" end)
WorkerManager.stop_all_workers(state.connection, state.workers)
shutdown_and_wait(state.acknowledger_pid)
shutdown_and_wait(state.group_coordinator_pid)
reason
end
defp call_lifecycle_assignment_received(state, assignments, generation_id) do
Enum.reduce_while(assignments, :ok, fn brod_received_assignment(topic: topic, partition: partition), :ok ->
case apply(state.assignment_received_handler, [state.group, topic, partition, generation_id]) do
:ok -> {:cont, :ok}
{:error, reason} -> {:halt, {:error, reason}}
end
end)
end
defp start_workers(state, generation_id, assignments) do
Enum.reduce(assignments, state.workers, fn assignment, workers ->
WorkerManager.start_worker(workers, generation_id, assignment, state)
end)
end
defp start_group_coordinator(state) do
with {:ok, group_coordinator_pid} <-
:brod_group_coordinator.start_link(
state.connection,
state.group,
state.topics,
state.config,
__MODULE__,
self()
) do
Elsa.Registry.register_name({registry(state.connection), :brod_group_coordinator}, group_coordinator_pid)
{:ok, group_coordinator_pid}
end
end
defp start_acknowledger(state) do
Elsa.Group.Acknowledger.start_link(connection: state.connection)
end
defp shutdown_and_wait(pid) do
Process.exit(pid, :shutdown)
receive do
{:EXIT, ^pid, _} ->
:ok
after
5_000 -> :ok
end
end
end
|
lib/elsa/group/manager.ex
| 0.71889
| 0.46217
|
manager.ex
|
starcoder
|
defmodule Arrow do
@typedoc """
Functor dictionary
intuitive type: fmap : (a -> b) -> f a -> f b
* `map`: (f a, a -> b) -> f b # params are swapped to facilitate piping, mandatory
* `lift_left`: a -> f b -> f a # default implementation provided, optional
"""
@type t :: %__MODULE__{
arr: (any -> any),
}
def __struct__, do: %{
__struct__: __MODULE__,
category: Category.__struct__(),
arr: fn _, _ -> raise("Arrow: missing definition for arr") end,
first: fn _, _ -> raise("Arrow: missing definition for first") end,
second: fn _, _ -> raise("Arrow: missing definition for second") end,
parallel: fn _, _ -> raise("Arrow: missing definition for parallel") end,
fanout: fn _, _ -> raise("Arrow: missing definition for fanout/2") end,
}
def __struct__(kv) do
required_keys = [
:arr,
:first,
:second,
:parallel,
:fanout
]
{map, keys} =
Enum.reduce(kv, {__struct__(), required_keys}, fn {key, val}, {map, keys} ->
{Map.replace!(map, key, val), List.delete(keys, key)}
end)
case keys do
[] ->
map
_ ->
raise ArgumentError,
"the following keys must also be given when building " <>
"struct #{inspect(__MODULE__)}: #{inspect(keys)}"
end
end
def define(base_dict) do
base_dict = Map.new(base_dict)
category = Map.fetch!(base_dict, :category)
{arr, first, parallel} = case base_dict do
%{arr: arr, first: first} ->
parallel = Map.get(base_dict, :parallel, fn ar1, ar2 ->
c = category
swap = arr.(fn {x, y} -> {y, x} end)
first.(ar1) |> c.>>>.(swap) |> c.>>>.(first.(ar2)) |> c.>>>.(swap)
end)
{arr, first, parallel}
%{arr: arr, parallel: parallel} ->
first = Map.get(base_dict, :first, fn ar -> parallel.(ar, category.id) end)
{arr, first, parallel}
_ ->
raise("Bifunctor minimal definition require `arr` and either `first` or `parallel`")
end
second = Map.get(base_dict, :second, fn ar -> parallel.(category.id, ar) end)
fanout = Map.get(base_dict, :fanout, fn ar1, ar2 ->
c = category
arr.(fn x -> {x, x} end) |> c.>>>.(parallel.(ar1, ar2))
end)
%__MODULE__{
category: category,
arr: arr,
first: first,
second: second,
parallel: parallel,
fanout: fanout
}
end
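# Illustrative usage sketch (assumes a hand-built category dictionary for plain
# functions, i.e. a map exposing `id` and `:>>>`; all names here are hypothetical):
#
#     fun_cat = %{:>>> => fn f, g -> fn x -> g.(f.(x)) end end, id: & &1}
#     fun_arrow =
#       Arrow.define(
#         category: fun_cat,
#         arr: & &1,
#         first: fn f -> fn {x, y} -> {f.(x), y} end end
#       )
#     fun_arrow.fanout.(&(&1 + 1), &(&1 * 2)).(3)
#     #=> {4, 6}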
end
|
typeclassopedia/lib/arrow.ex
| 0.857843
| 0.466663
|
arrow.ex
|
starcoder
|
defmodule Tai.Venues.Config do
@moduledoc """
Venue configuration for a `tai` instance. This module provides a utility
function `Tai.Venues.Config.parse/0` that hydrates a list of venues from
the `%Tai.Config{}`.
It can be configured with the following options:
```
config :tai,
venues: %{
okex: [
# Module that implements the `Tai.Venues.Adapter` behaviour
adapter: Tai.VenueAdapters.OkEx,
# [default: %Tai.Config#adapter_timeout] [optional] Per venue override for start
# timeout in milliseconds
timeout: 120_000,
# [default: true] [optional] Starts the venue on initial boot
start_on_boot: true,
# [default: []] [optional] Subscribe to venue specific channels
channels: [],
# [default: "*"] [optional] A `juice` query matching on alias and symbol, or `{module, func_name}`
# to filter available products. Juice query syntax is described in more detail at
# https://github.com/rupurt/juice#usage
products: "eth_usd_200925 eth_usd_bi_quarter",
# [default: "*"] [optional] A `juice` query matching on alias and symbol, or `{module, func_name}`
# to filter streaming order books from available products. Juice query syntax is described in more
# detail at https://github.com/rupurt/juice#usage
order_books: "* -eth_usd_200925",
# [default: 1] [optional] The number of streaming order book levels to maintain. This
# value has adapter specific support. For example some venues may only allow you to
# subscribe in blocks of 5 price points. So supported values for that venue
# are `5`, `10`, `15`, ...
quote_depth: 1,
# [default: "*"] [optional] A juice query matching on asset to filter available accounts.
# Juice query syntax is described in more detail at https://github.com/rupurt/juice#usage
accounts: "*",
# [default: %{}] [optional] `Map` of named credentials to use private API's on the venue
credentials: %{
main: %{
api_key: {:system_file, "OKEX_API_KEY"},
api_secret: {:system_file, "OKEX_API_SECRET"},
api_passphrase: {:system_file, "OKEX_API_PASSPHR<PASSWORD>"}
}
},
# [default: %{}] [optional] `Map` of extra venue configuration parameters for non-standard
# tai functionality.
opts: %{},
]
}
```
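With the configuration above, hydrating the venue structs is then (illustrative):
```
venues = Tai.Venues.Config.parse()
Enum.map(venues, & &1.id) #=> [:okex]
```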
"""
@type venue :: Tai.Venue.t()
@spec parse() :: [venue]
@spec parse(Tai.Config.t()) :: [venue]
def parse(config \\ Tai.Config.parse()) do
config.venues
|> Enum.map(fn {id, params} ->
%Tai.Venue{
id: id,
adapter: fetch!(params, :adapter),
channels: get(params, :channels, []),
products: get(params, :products, "*"),
order_books: get(params, :order_books, "*"),
accounts: get(params, :accounts, "*"),
credentials: get(params, :credentials, %{}),
quote_depth: get(params, :quote_depth, 1),
start_on_boot: get(params, :start_on_boot, true),
opts: get(params, :opts, %{}),
timeout: get(params, :timeout, config.adapter_timeout),
stream_heartbeat_interval: get(params, :stream_heartbeat_interval, 5000),
stream_heartbeat_timeout: get(params, :stream_heartbeat_timeout, 3000),
broadcast_change_set: get(params, :broadcast_change_set, config.broadcast_change_set)
}
end)
end
defp get(env, key, default), do: Keyword.get(env, key, default)
defp fetch!(env, key), do: Keyword.fetch!(env, key)
end
|
apps/tai/lib/tai/venues/config.ex
| 0.886969
| 0.807157
|
config.ex
|
starcoder
|
defmodule Workex do
@moduledoc """
A behaviour which separates message receiving and aggregating from message processing.
Example:
defmodule Consumer do
use Workex
# Interface functions are invoked inside client processes
def start_link do
Workex.start_link(__MODULE__, nil)
end
def push(pid, item) do
Workex.push(pid, item)
end
# Callback functions run in the worker process
def init(_), do: {:ok, nil}
def handle(data, state) do
Processor.long_op(data)
{:ok, state}
end
end
The `callback` module must export following functions:
- init/1 - receives `arg` and should return `{:ok, initial_state}` or `{:stop, reason}`.
- handle/2 - receives aggregated messages and the state, and should return `{:ok, new_state}`
or `{:stop, reason}`.
- handle_message/2 - optional message handler
The `Workex` starts two processes. The one returned by `Workex.start_link/4` is the "facade"
process which can be used as the target for messages. This is also the process which aggregates
messages.
Callback functions will run in the worker process, which is started by the "main" process.
Thus, consuming is done concurrently to message aggregation.
Both processes are linked, and the main process traps exits. Termination of the worker process
will cause the main process to terminate with the same exit reason.
"""
defstruct [
:worker_pid,
:aggregate,
:worker_available,
:max_size,
:replace_oldest,
pending_responses: MapSet.new,
processing_responses: MapSet.new
]
use ExActor.Tolerant
use Behaviour
@typep worker_state :: any
@type workex_options ::
[{:aggregate, Workex.Aggregate.t} |
{:max_size, pos_integer} |
{:replace_oldest, boolean}]
@typep handle_response ::
{:ok, worker_state} |
{:ok, worker_state, pos_integer | :hibernate} |
{:stop, reason :: any} |
{:stop, reason :: any, worker_state}
defcallback init(any) :: {:ok, worker_state} | {:ok, worker_state, pos_integer | :hibernate} | {:stop, reason :: any}
defcallback handle(Workex.Aggregate.value, worker_state) :: handle_response
defcallback handle_message(any, worker_state) :: handle_response
defmacro __using__(_) do
quote do
@behaviour unquote(__MODULE__)
def handle_message(_, state), do: {:ok, state}
defoverridable handle_message: 2
end
end
alias Workex.Aggregate
@doc """
Starts aggregator and worker processes.
See `start_link/4` for detailed description.
"""
@spec start(module, any, workex_options, GenServer.options) :: GenServer.on_start
defstart start(callback, arg, opts \\ []), gen_server_opts: :runtime
@doc """
Starts aggregator and worker processes.
Possible options are:
- `aggregate` - Aggregation instance. Defaults to `%Workex.Queue{}`. Must implement `Workex.Aggregate`.
- `max_size` - Maximum number of messages in the buffer after which new messages are discarded.
- `replace_oldest` - Alters behavior of `max_size`. When the buffer is full, new message replaces the
oldest one.
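A minimal, illustrative call (using the `Consumer` callback module from the
moduledoc example):
Workex.start_link(Consumer, nil, max_size: 100, replace_oldest: true)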
"""
@spec start_link(module, any, workex_options, GenServer.options) :: GenServer.on_start
defstart start_link(callback, arg, opts \\ []), gen_server_opts: :runtime
definit {callback, arg, opts} do
Process.flag(:trap_exit, true)
case (Workex.Worker.start_link(self, callback, arg)) do
{:ok, worker_pid} ->
%__MODULE__{
aggregate: opts[:aggregate] || %Workex.Queue{},
worker_pid: worker_pid,
max_size: opts[:max_size] || :unbound,
replace_oldest: opts[:replace_oldest] || false,
worker_available: true
}
|> initial_state
{:error, reason} -> {:stop, reason}
end
end
@doc """
Pushes a new message, returns immediately.
"""
@spec push(GenServer.server, any) :: :ok
defcast push(message), state: state do
{_, state} = add_and_notify(state, message)
new_state(state)
end
@doc """
Pushes a new message and returns as soon as the message is queued (or rejected).
"""
@spec push_ack(GenServer.server, any, non_neg_integer | :infinity) :: :ok | {:error, reason :: any}
defcall push_ack(message), timeout: timeout \\ 5000, state: state do
{response, state} = add_and_notify(state, message)
set_and_reply(state, response)
end
@doc """
Pushes a new message and returns after the message is processed (or rejected).
"""
@spec push_block(GenServer.server, any, non_neg_integer | :infinity) :: :ok | {:error, reason :: any}
defcall push_block(message), timeout: timeout \\ 5000, state: state, from: from do
{response, state} = add_and_notify(state, message, from)
case response do
:ok -> new_state(state)
_ -> set_and_reply(state, response)
end
end
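# Adds the message to the aggregate. When the worker is busy and the buffer has
# reached `max_size`, the message either replaces the oldest entry
# (`replace_oldest: true`) or is rejected with `{:error, :max_capacity}`.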
defp add_and_notify(
%__MODULE__{
aggregate: aggregate,
max_size: max_size,
worker_available: worker_available,
replace_oldest: replace_oldest
} = state,
message,
from \\ nil
) do
if (not worker_available) and Aggregate.size(aggregate) == max_size do
if replace_oldest do
aggregate
|> Aggregate.remove_oldest
|> Aggregate.add(message)
|> handle_add(state, from)
else
{{:error, :max_capacity}, state}
end
else
aggregate
|> Aggregate.add(message)
|> handle_add(state, from)
end
end
defp handle_add(add_result, state, from) do
case add_result do
{:ok, new_aggregate} ->
{:ok,
%__MODULE__{state | aggregate: new_aggregate}
|> add_pending_response(from)
|> maybe_notify_worker
}
error ->
{error, state}
end
end
defp add_pending_response(state, nil), do: state
defp add_pending_response(%__MODULE__{pending_responses: pending_responses} = state, from) do
%__MODULE__{state | pending_responses: MapSet.put(pending_responses, from)}
end
defp maybe_notify_worker(
%__MODULE__{
worker_available: true,
worker_pid: worker_pid,
aggregate: aggregate,
pending_responses: pending_responses
} = state
) do
unless Aggregate.size(aggregate) == 0 do
{payload, aggregate} = Aggregate.value(aggregate)
Workex.Worker.process(worker_pid, payload)
%__MODULE__{state |
worker_available: false,
aggregate: aggregate,
processing_responses: pending_responses,
pending_responses: MapSet.new
}
else
state
end
end
defp maybe_notify_worker(state), do: state
defhandleinfo {:workex, :worker_available}, state: state do
%__MODULE__{state | worker_available: true}
|> notify_pending
|> maybe_notify_worker
|> new_state
end
defp notify_pending(%__MODULE__{processing_responses: processing_responses} = state) do
Enum.each(processing_responses, &GenServer.reply(&1, :ok))
%__MODULE__{state | processing_responses: MapSet.new}
end
defhandleinfo {:EXIT, worker_pid, reason}, state: %__MODULE__{worker_pid: worker_pid} do
stop_server(reason)
end
defhandleinfo _, do: noreply
end
|
lib/workex.ex
| 0.781914
| 0.615146
|
workex.ex
|
starcoder
|
defmodule Blogit.Settings do
@moduledoc """
Provides application-wide settings for `Blogit`.
All of the functions in this modules are settings, which can be reused
through the other modules.
"""
@posts_folder "posts"
@meta_divider "\n---\n"
@doc """
Retrieves the list of supported languages configured for `Blogit`.
The languages can be configured like this:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
languages: ~w(bg en)
```
For every configured language a separate set of posts and blog configuration
is kept. Every language has its own `Blogit.Components.*` processes running,
that can be queried.
By default the list of languages is `["en"]`.
## Examples
iex> Application.put_env(:blogit, :languages, ["bg"])
iex> Blogit.Settings.languages()
["bg"]
iex> Application.delete_env(:blogit, :languages) # Use default
iex> Blogit.Settings.languages()
["en"]
"""
@spec languages() :: [String.t()]
def languages, do: Application.get_env(:blogit, :languages, ~w(en))
@doc """
Returns the default language configured for `Blogit`. This is the first
language in the list of languages configured like this:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
languages: ~w(bg en)
```
By default it is `"en"` for English.
## Examples
iex> Application.put_env(:blogit, :languages, ["bg", "de"])
iex> Blogit.Settings.default_language()
"bg"
iex> Application.delete_env(:blogit, :languages) # Use default
iex> Blogit.Settings.default_language()
"en"
"""
@spec default_language() :: String.t()
def default_language, do: languages() |> List.first()
@doc """
Returns a list of the additional or secondary languages configured for
`Blogit`. These are all the languages except the first one in the `:languages`
configuration, like this one:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
languages: ~w(bg en)
```
By default it is an empty list.
## Examples
iex> Application.put_env(:blogit, :languages, ["es", "bg", "de"])
iex> Blogit.Settings.additional_languages()
["bg", "de"]
iex> Application.delete_env(:blogit, :languages) # Use default
iex> Blogit.Settings.additional_languages()
[]
"""
@spec additional_languages() :: [String.t()]
def additional_languages do
[_ | rest] = languages()
rest
end
@doc """
Returns the path to the configuration file of `Blogit`. It can be configured
like this:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
configuration_file: path-to-the-file.yml
```
By default it is in the root of the repository and called 'blog.yml'.
## Examples
iex> Application.put_env(:blogit, :configuration_file, "my_conf.yml")
iex> Blogit.Settings.configuration_file()
"my_conf.yml"
iex> Application.delete_env(:blogit, :configuration_file) # Use default
iex> Blogit.Settings.configuration_file()
"blog.yml"
"""
@spec configuration_file() :: String.t()
def configuration_file do
Application.get_env(:blogit, :configuration_file, "blog.yml")
end
@doc """
Returns the string `"posts"` - the location of the posts folder in the
repository. A custom provider can opt to not use this default location, but
the `Blogit.RepositoryProviders.Git` provider expects posts to be located
in a folder named `posts` in the root of the repository.
## Examples
iex> Blogit.Settings.posts_folder()
"posts"
"""
@spec posts_folder() :: String.t()
def posts_folder, do: @posts_folder
@doc ~S"""
Returns the string `"\n---\n"` - the divider used to separate the meta data
from the post content in the post markdown files. It's advisable to have it in
two places - before and after the metadata:
```
---
author: Dali
---
# My meta content
```
## Examples
iex> Blogit.Settings.meta_divider()
"\n---\n"
"""
@spec meta_divider() :: String.t()
def meta_divider, do: @meta_divider
@doc """
Returns how many lines of the source file of a post should be used for
generating its preview. Can be configured in the `Blogit` configuration:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
max_lines_in_preview: 30
```
If not configured its default value is `10`.
## Examples
iex> Application.put_env(:blogit, :max_lines_in_preview, 30)
iex> Blogit.Settings.max_lines_in_preview()
30
iex> Application.delete_env(:blogit, :max_lines_in_preview) # Use default
iex> Blogit.Settings.max_lines_in_preview()
10
"""
@spec max_lines_in_preview() :: pos_integer
def max_lines_in_preview do
Application.get_env(:blogit, :max_lines_in_preview, 10)
end
@doc """
Returns true if polling the source repository for changes is configured to be
'on'. That can be done like this:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
polling: true
```
The default is `true` if it is not configured.
## Examples
iex> Application.put_env(:blogit, :polling, true)
iex> Blogit.Settings.polling?()
true
iex> Application.put_env(:blogit, :polling, false)
iex> Blogit.Settings.polling?()
false
iex> Application.delete_env(:blogit, :polling) # Use default
iex> Blogit.Settings.polling?()
true
"""
@spec polling?() :: boolean
def polling?, do: Application.get_env(:blogit, :polling, true)
@doc """
Returns the interval used when polling the source repository for changes.
It is used only if `Blogit.Settings.polling?()` returns `true`.
The value is the interval in milliseconds. The configuration is specified
in seconds, like this:
```
config :blogit,
repository_url: some-url, repository_provider: some-provider,
polling: true, poll_interval: 50
```
The default is `10_000` for ten seconds.
## Examples
iex> Application.put_env(:blogit, :poll_interval, 60)
iex> Blogit.Settings.poll_interval()
60_000
iex> Application.delete_env(:blogit, :poll_interval) # Use default
iex> Blogit.Settings.poll_interval()
10_000
"""
@spec poll_interval() :: pos_integer
def poll_interval, do: Application.get_env(:blogit, :poll_interval, 10) * 1000
end
|
lib/blogit/settings.ex
| 0.916843
| 0.657153
|
settings.ex
|
starcoder
|
defmodule Personnummer do
@moduledoc """
Validate Swedish personal identity numbers `Personnummer`.
"""
defstruct [:date, :serial, :control, :separator, :coordination]
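@doc """
Parses a personal identity number string into a `%Personnummer{}` struct.
Returns `{:ok, struct}` when the input matches the expected format and encodes
a valid date, and `{:error, nil}` otherwise.
"""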
def new(pnr_string) do
matches =
Regex.run(~r/^(\d{2}){0,1}(\d{2})(\d{2})(\d{2})([-+]{0,1})(\d{3})(\d{0,1})$/, pnr_string)
if is_nil(matches) or matches |> length < 7 do
{:error, nil}
else
matches
|> from_matches
end
end
@doc """
Formats a personal identity number in short format.
## Examples
iex> {_, p} = Personnummer.new("199001011234")
iex> Personnummer.format(p)
"900101-1234"
iex> {_, p} = Personnummer.new("199001010001")
iex> Personnummer.format(p)
"900101-0001"
iex> {_, p} = Personnummer.new("199001610001")
iex> Personnummer.format(p)
"900161-0001"
"""
def format(pnr) do
Personnummer.format(pnr, true)
|> String.slice(2..-1)
end
@doc """
Formats a personal identity number in long format.
## Examples
iex> {_, p} = Personnummer.new("9001011234")
iex> Personnummer.format(p, true)
"19900101-1234"
"""
def format(pnr, true) do
day =
if pnr.coordination do
pnr.date.day + 60
else
pnr.date.day
end
month =
pnr.date.month
|> Integer.to_string()
|> String.pad_leading(2, "0")
day =
day
|> Integer.to_string()
|> String.pad_leading(2, "0")
serial =
pnr.serial
|> Integer.to_string()
|> String.pad_leading(3, "0")
"#{pnr.date.year}#{month}#{day}-#{serial}#{pnr.control}"
end
@doc """
Checks if the personal identity number is valid. Requires a valid date and a
valid last four digits.
## Examples
iex> p = %Personnummer{}
iex> Personnummer.valid?(p)
false
"""
def valid?(_ = %Personnummer{date: nil}) do
false
end
@doc """
Checks if the personal identity number is valid. Requires a valid date and a
valid last four digits.
## Examples
iex> {_, p1} = Personnummer.new("19900101-0017")
iex> {_, p2} = Personnummer.new("19900101-0018")
iex> Personnummer.valid?(p1)
true
iex> Personnummer.valid?(p2)
false
"""
def valid?(pnr = %Personnummer{}) do
short_date =
Personnummer.format(pnr)
|> String.slice(0..5)
serial =
pnr.serial
|> Integer.to_string()
|> String.pad_leading(3, "0")
pnr.serial > 0 && luhn_checksum("#{short_date}#{serial}") == pnr.control
end
@doc """
Quick validation function to just validate a personal identity number from a
string without preserving the struct with the personal identity number data.
## Examples
iex> Personnummer.valid?("19900101-0017")
true
iex> Personnummer.valid?("19900101-0019")
false
iex> Personnummer.valid?("bogus")
false
iex> Personnummer.valid?("903030-0017")
false
"""
def valid?(pnr_str) when is_binary(pnr_str) do
case Personnummer.new(pnr_str) do
{:error, nil} -> false
{:ok, pnr} -> Personnummer.valid?(pnr)
end
end
@doc ~S"""
Get the age of the person holding the personal identity number.
## Examples
iex> now = DateTime.utc_now()
iex> {_, x} = Date.new(now.year - 20, now.month, now.day)
iex> pnr = "#{x.year}0101-1234"
iex> {_, p} = Personnummer.new(pnr)
iex> Personnummer.get_age(p)
20
"""
def get_age(pnr) do
now = DateTime.utc_now()
years_since_born = now.year - pnr.date.year
cond do
pnr.date.month > now.month -> years_since_born - 1
pnr.date.month == now.month && pnr.date.day > now.day -> years_since_born - 1
true -> years_since_born
end
end
@doc """
Returns true if the person behind the personal identity number is a female.
## Examples
iex> {_, p} = Personnummer.new("19090903-6600")
iex> Personnummer.is_female?(p)
true
"""
def is_female?(pnr) do
pnr.serial
|> rem(10)
|> rem(2) == 0
end
@doc """
Returns true if the person behind the personal identity number is a male.
## Examples
iex> {_, p} = Personnummer.new("19900101-0017")
iex> Personnummer.is_male?(p)
true
"""
def is_male?(pnr) do
!Personnummer.is_female?(pnr)
end
@doc """
Returns true if the parsed personal identity number is a coordination number.
## Examples
iex> {_, p} = Personnummer.new("800161-3294")
iex> Personnummer.is_coordination_number(p)
true
"""
def is_coordination_number(pnr) do
pnr.coordination
end
defp matches_to_map(matches) do
century =
if Enum.at(matches, 1) == "" do
1900
else
integer_at(matches, 1) * 100
end
%{
century: century,
year: integer_at(matches, 2),
month: integer_at(matches, 3),
day: integer_at(matches, 4),
serial: integer_at(matches, 6),
control: integer_at(matches, 7),
separator: Enum.at(matches, 5)
}
end
defp from_matches(matches) do
matched_map = matches_to_map(matches)
{day, is_coordination} =
if matched_map.day > 60 do
{matched_map.day - 60, true}
else
{matched_map.day, false}
end
{date_result, date} = Date.new(matched_map.century + matched_map.year, matched_map.month, day)
if date_result == :error do
{:error, nil}
else
{:ok,
%Personnummer{
date: date,
serial: matched_map.serial,
control: matched_map.control,
separator: matched_map.separator,
coordination: is_coordination
}}
end
end
defp integer_at(matches, pos) do
Enum.at(matches, pos)
|> Integer.parse()
|> elem(0)
end
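# Luhn checksum over the nine digits of "YYMMDDNNN": digits at even indices are
# doubled (a two-digit result is reduced by subtracting 9), everything is
# summed, and the control digit is 10 minus the sum modulo 10, with 10 mapped
# to 0.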
defp luhn_checksum(digits) do
(10 -
(digits
|> String.split("", trim: true)
|> Enum.map(&String.to_integer/1)
|> Enum.with_index()
|> List.foldl(0, fn {digit, i}, acc -> acc + digit_to_add(i, digit) end)
|> rem(10)))
|> checksum
end
defp checksum(c) do
case c do
10 -> 0
_ -> c
end
end
defp digit_to_add(i, digit) do
if rem(i, 2) == 0 do
(digit * 2)
|> double_digit
else
digit
end
end
defp double_digit(digit) do
if digit > 9 do
digit - 9
else
digit
end
end
end
|
lib/personnummer.ex
| 0.814496
| 0.415788
|
personnummer.ex
|
starcoder
|
defmodule NervesHub.HTTPFwupStream do
@moduledoc """
Handles streaming a fwupdate via HTTP.
A `callback` process is expected to be passed to `start_link/1`.
Messages will be received in the shape:
* `{:fwup_message, message}` - see the docs for
[Fwup](https://hexdocs.pm/fwup/) for more info.
* `{:http_error, {status_code, body}}`
* `{:http_error, :timeout}`
* `{:http_error, :too_many_redirects}`
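## Example
A minimal, illustrative sketch (the URL is a placeholder and `handle_fwup/1`
is a hypothetical handler):
{:ok, pid} = NervesHub.HTTPFwupStream.start_link(self())
NervesHub.HTTPFwupStream.get(pid, "https://example.com/firmware.fw")
receive do
{:fwup_message, msg} -> handle_fwup(msg)
end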
"""
use GenServer
require Logger
@redirect_status_codes [301, 302, 303, 307, 308]
def start_link(cb) do
start_httpc()
GenServer.start_link(__MODULE__, [cb])
end
def stop(pid) do
GenServer.stop(pid)
end
def get(pid, url) do
GenServer.call(pid, {:get, url}, :infinity)
end
def init([cb]) do
devpath = Nerves.Runtime.KV.get("nerves_fw_devpath") || "/dev/mmcblk0"
args = ["--apply", "--no-unmount", "-d", devpath, "--task", "upgrade"]
args =
Enum.reduce(NervesHub.Certificate.public_keys(), args, fn public_key, args ->
args ++ ["--public-key", public_key]
end)
{:ok, fwup} = Fwup.stream(cb, args)
{:ok,
%{
url: nil,
callback: cb,
content_length: 0,
buffer: "",
buffer_size: 0,
filename: "",
caller: nil,
number_of_redirects: 0,
timeout: 15000,
fwup: fwup
}}
end
def terminate(:normal, state) do
GenServer.stop(state.fwup, :normal)
:noop
end
def terminate({:error, reason}, state) do
state.caller && GenServer.reply(state.caller, reason)
state.callback && send(state.callback, reason)
GenServer.stop(state.fwup, :normal)
end
def handle_call({:get, _url}, _from, %{number_of_redirects: n} = s) when n > 5 do
{:stop, {:error, {:http_error, :too_many_redirects}}, s}
end
def handle_call({:get, url}, from, s) do
headers = [
{'Content-Type', 'application/octet-stream'}
]
http_opts = [timeout: :infinity, autoredirect: false]
opts = [stream: :self, receiver: self(), sync: false]
:httpc.request(
:get,
{String.to_charlist(url), headers},
http_opts,
opts,
:nerves_hub_fwup_stream
)
{:noreply, %{s | url: url, caller: from}}
end
def handle_info({:http, {_, :stream_start, headers}}, s) do
Logger.debug("Stream Start: #{inspect(headers)}")
content_length =
case Enum.find(headers, fn {key, _} -> key == 'content-length' end) do
nil ->
0
{_, content_length} ->
{content_length, _} =
content_length
|> to_string()
|> Integer.parse()
content_length
end
filename =
case Enum.find(headers, fn {key, _} -> key == 'content-disposition' end) do
nil ->
Path.basename(s.url)
{_, filename} ->
filename
|> to_string
|> String.split(";")
|> List.last()
|> String.trim()
|> String.trim("filename=")
end
{:noreply, %{s | content_length: content_length, filename: filename}}
end
def handle_info({:http, {_, :stream, data}}, s) do
Fwup.send_chunk(s.fwup, data)
{:noreply, s, s.timeout}
end
def handle_info({:http, {_, :stream_end, _headers}}, s) do
Logger.debug("Stream End")
GenServer.reply(s.caller, {:ok, s.buffer})
{:noreply, %{s | filename: "", content_length: 0, buffer: "", buffer_size: 0, url: nil}}
end
def handle_info({:http, {_ref, {{_, status_code, _}, headers, body}}}, s)
when status_code in @redirect_status_codes do
Logger.debug("Redirect")
case Enum.find(headers, fn {key, _} -> key == 'location' end) do
{'location', next_url} ->
handle_call({:get, List.to_string(next_url)}, s.caller, %{
s
| buffer: "",
buffer_size: 0,
number_of_redirects: s.number_of_redirects + 1
})
_ ->
{:stop, {:http_error, {status_code, body}}, s}
end
end
def handle_info({:http, {_ref, {{_, status_code, _}, _headers, body}}}, s) do
Logger.error("Error: #{status_code} #{inspect(body)}")
{:stop, {:error, {:http_error, {status_code, body}}}, s}
end
def handle_info(:timeout, s) do
Logger.error("Error: timeout")
{:stop, {:error, {:http_error, :timeout}}, s}
end
defp start_httpc() do
:inets.start(:httpc, profile: :nerves_hub_fwup_stream)
opts = [
max_sessions: 8,
max_keep_alive_length: 4,
max_pipeline_length: 4,
keep_alive_timeout: 120_000,
pipeline_timeout: 60_000
]
:httpc.set_options(opts, :nerves_hub_fwup_stream)
end
end
|
lib/nerves_hub/http_fwup_stream.ex
| 0.765769
| 0.402304
|
http_fwup_stream.ex
|
starcoder
|
defmodule AWS.IoT do
@moduledoc """
AWS IoT
AWS IoT provides secure, bi-directional communication between
Internet-connected things (such as sensors, actuators, embedded devices, or
smart appliances) and the AWS cloud. You can discover your custom IoT-Data
endpoint to communicate with, configure rules for data processing and
integration with other services, organize resources associated with each
thing (Thing Registry), configure logging, and create and manage policies
and credentials to authenticate things.
For more information about how AWS IoT works, see the [Developer
Guide](http://docs.aws.amazon.com/iot/latest/developerguide/aws-iot-how-it-works.html).
"""
@doc """
Accepts a pending certificate transfer. The default state of the
certificate is INACTIVE.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
"""
def accept_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/accept-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Attaches the specified policy to the specified principal (certificate or
other credential).
"""
def attach_principal_policy(client, policy_name, input, options \\ []) do
url = "/principal-policies/#{URI.encode(policy_name)}"
headers = []
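# The "principal" value travels in the x-amzn-iot-principal header rather than
# the request body, so it is lifted out of the input map before the request.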
if Map.has_key?(input, "principal") do
headers = [{"x-amzn-iot-principal", input["principal"]}|headers]
input = Map.delete(input, "principal")
request(client, :put, url, headers, input, options, nil)
else
request(client, :put, url, headers, input, options, nil)
end
end
@doc """
Attaches the specified principal to the specified thing.
"""
def attach_thing_principal(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
if Map.has_key?(input, "principal") do
headers = [{"x-amzn-principal", input["principal"]}|headers]
input = Map.delete(input, "principal")
request(client, :put, url, headers, input, options, nil)
else
request(client, :put, url, headers, input, options, nil)
end
end
@doc """
Cancels a pending transfer for the specified certificate.
**Note** Only the transfer source account can use this operation to cancel
a transfer. (Transfer destinations can use `RejectCertificateTransfer`
instead.) After transfer, AWS IoT returns the certificate to the source
account in the INACTIVE state. After the destination account has accepted
the transfer, the transfer cannot be cancelled.
After a certificate transfer is cancelled, the status of the certificate
changes from PENDING_TRANSFER to INACTIVE.
"""
def cancel_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/cancel-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Creates an X.509 certificate using the specified certificate signing
request.
**Note** Reusing the same certificate signing request (CSR) results in a
distinct certificate.
You can create multiple certificates in a batch by creating a directory,
copying multiple .csr files into that directory, and then specifying that
directory on the command line. The following commands show how to create a
batch of certificates given a batch of CSRs.
Assuming a set of CSRs are located inside of the directory
my-csr-directory:
On Linux and OS X, the command is:
$ ls my-csr-directory/ | xargs -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}
This command lists all of the CSRs in my-csr-directory and pipes each CSR
file name to the aws iot create-certificate-from-csr AWS CLI command to
create a certificate for the corresponding CSR.
The aws iot create-certificate-from-csr part of the command can also be run
in parallel to speed up the certificate creation process:
$ ls my-csr-directory/ | xargs -P 10 -I {} aws iot
create-certificate-from-csr --certificate-signing-request
file://my-csr-directory/{}
On Windows PowerShell, the command to create certificates for all CSRs in
my-csr-directory is:
> ls -Name my-csr-directory | %{aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/$_}
On a Windows command prompt, the command to create certificates for all
CSRs in my-csr-directory is:
> forfiles /p my-csr-directory /c "cmd /c aws iot
create-certificate-from-csr --certificate-signing-request file://@path"
"""
def create_certificate_from_csr(client, input, options \\ []) do
url = "/certificates"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a 2048-bit RSA key pair and issues an X.509 certificate using the
issued public key.
**Note** This is the only time AWS IoT issues the private key for this
certificate, so it is important to keep it in a secure location.
"""
def create_keys_and_certificate(client, input, options \\ []) do
url = "/keys-and-certificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates an AWS IoT policy.
The created policy is the default version for the policy. This operation
creates a policy version with a version identifier of **1** and sets **1**
as the policy's default version.
"""
def create_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new version of the specified AWS IoT policy. To update a policy,
create a new policy version. A managed policy can have up to five versions.
If the policy has five versions, you must use `DeletePolicyVersion` to
delete an existing version before you create a new one.
Optionally, you can set the new version as the policy's default version.
The default version is the operative version (that is, the version that is
in effect for the certificates to which the policy is attached).
"""
def create_policy_version(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a thing record in the thing registry.
"""
def create_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a new thing type.
"""
def create_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Creates a rule. Creating rules is an administrator-level action. Any user
who has permission to create rules will be able to access data processed by
the rule.
"""
def create_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Deletes a registered CA certificate.
"""
def delete_c_a_certificate(client, certificate_id, input, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified certificate.
A certificate cannot be deleted if it has a policy attached to it or if its
status is set to ACTIVE. To delete a certificate, first use the
`DetachPrincipalPolicy` API to detach all policies. Next, use the
`UpdateCertificate` API to set the certificate to the INACTIVE status.
"""
def delete_certificate(client, certificate_id, input, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified policy.
A policy cannot be deleted if it has non-default versions or it is attached
to any certificate.
To delete a policy, use the DeletePolicyVersion API to delete all
non-default versions of the policy; use the DetachPrincipalPolicy API to
detach the policy from any certificate; and then use the DeletePolicy API
to delete the policy.
When a policy is deleted using DeletePolicy, its default version is deleted
with it.
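A sketch of the full sequence (illustrative; assumes this module is compiled
as `AWS.IoT`, `client` is a configured AWS client struct, and `certificate_arn`
holds the attached principal):
AWS.IoT.delete_policy_version(client, "my-policy", "2", %{})
AWS.IoT.detach_principal_policy(client, "my-policy", %{"principal" => certificate_arn})
AWS.IoT.delete_policy(client, "my-policy", %{})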
"""
def delete_policy(client, policy_name, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified version of the specified policy. You cannot delete
the default version of a policy using this API. To delete the default
version of a policy, use `DeletePolicy`. To find out which version of a
policy is marked as the default version, use ListPolicyVersions.
"""
def delete_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes a CA certificate registration code.
"""
def delete_registration_code(client, input, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing.
"""
def delete_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified thing type. You cannot delete a thing type if it has
things associated with it. To delete a thing type, first mark it as
deprecated by calling `DeprecateThingType`, then remove any associated
things by calling `UpdateThing` to change the thing type on any associated
thing, and finally use `DeleteThingType` to delete the thing type.
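A sketch of that sequence (illustrative; assumes this module is compiled as
`AWS.IoT`, and the `thingTypeName` input key is an assumption here):
AWS.IoT.deprecate_thing_type(client, "old-type", %{})
AWS.IoT.update_thing(client, "thing-1", %{"thingTypeName" => "new-type"})
AWS.IoT.delete_thing_type(client, "old-type", %{})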
"""
def delete_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deletes the specified rule.
"""
def delete_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :delete, url, headers, input, options, nil)
end
@doc """
Deprecates a thing type. You cannot associate new things with a deprecated
thing type.
"""
def deprecate_thing_type(client, thing_type_name, input, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}/deprecate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Describes a registered CA certificate.
"""
def describe_c_a_certificate(client, certificate_id, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified certificate.
"""
def describe_certificate(client, certificate_id, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Returns a unique endpoint specific to the AWS account making the call.
"""
def describe_endpoint(client, options \\ []) do
url = "/endpoint"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing.
"""
def describe_thing(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing type.
"""
def describe_thing_type(client, thing_type_name, options \\ []) do
url = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Removes the specified policy from the specified certificate.
"""
def detach_principal_policy(client, policy_name, input, options \\ []) do
url = "/principal-policies/#{URI.encode(policy_name)}"
headers = []
if Map.has_key?(input, "principal") do
headers = [{"x-amzn-iot-principal", input["principal"]}|headers]
input = Map.delete(input, "principal")
request(client, :delete, url, headers, input, options, nil)
else
request(client, :delete, url, headers, input, options, nil)
end
end
@doc """
Detaches the specified principal from the specified thing.
"""
def detach_thing_principal(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
if Map.has_key?(input, "principal") do
headers = [{"x-amzn-principal", input["principal"]}|headers]
input = Map.delete(input, "principal")
request(client, :delete, url, headers, input, options, nil)
else
request(client, :delete, url, headers, input, options, nil)
end
end
@doc """
Disables the specified rule.
"""
def disable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/disable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Enables the specified rule.
"""
def enable_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}/enable"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Gets the logging options.
"""
def get_logging_options(client, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy with the policy document of the
default version.
"""
def get_policy(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy version.
"""
def get_policy_version(client, policy_name, policy_version_id, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets a registration code used to register a CA certificate with AWS IoT.
"""
def get_registration_code(client, options \\ []) do
url = "/registrationcode"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Gets information about the specified rule.
"""
def get_topic_rule(client, rule_name, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the CA certificates registered for your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_c_a_certificates(client, options \\ []) do
url = "/cacertificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the certificates registered in your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_certificates(client, options \\ []) do
url = "/certificates"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
List the device certificates signed by the specified CA certificate.
"""
def list_certificates_by_c_a(client, ca_certificate_id, options \\ []) do
url = "/certificates-by-ca/#{URI.encode(ca_certificate_id)}"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists certificates that are being transferred but not yet accepted.
"""
def list_outgoing_certificates(client, options \\ []) do
url = "/certificates-out-going"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your policies.
"""
def list_policies(client, options \\ []) do
url = "/policies"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified policy.
"""
def list_policy_principals(client, policy_name \\ nil, options \\ []) do
url = "/policy-principals"
headers = []
headers = if policy_name, do: [{"x-amzn-iot-policy", policy_name} | headers], else: headers
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the versions of the specified policy and identifies the default
version.
"""
def list_policy_versions(client, policy_name, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified principal. If you use a
Cognito identity, the ID must be in [Amazon Cognito Identity
format](http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html#API_GetCredentialsForIdentity_RequestSyntax).
"""
def list_principal_policies(client, principal \\ nil, options \\ []) do
url = "/principal-policies"
headers = []
headers = if principal, do: [{"x-amzn-iot-principal", principal} | headers], else: headers
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the things associated with the specified principal.
"""
def list_principal_things(client, principal \\ nil, options \\ []) do
url = "/principals/things"
headers = []
headers = if principal, do: [{"x-amzn-principal", principal} | headers], else: headers
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the principals associated with the specified thing.
"""
def list_thing_principals(client, thing_name, options \\ []) do
url = "/things/#{URI.encode(thing_name)}/principals"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the existing thing types.
"""
def list_thing_types(client, options \\ []) do
url = "/thing-types"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists your things. Use the **attributeName** and **attributeValue**
parameters to filter your things. For example, calling `ListThings` with
attributeName=Color and attributeValue=Red retrieves all things in the
registry that contain an attribute **Color** with the value **Red**.
"""
def list_things(client, options \\ []) do
url = "/things"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Lists the rules for the specified topic.
"""
def list_topic_rules(client, options \\ []) do
url = "/rules"
headers = []
request(client, :get, url, headers, nil, options, nil)
end
@doc """
Registers a CA certificate with AWS IoT. This CA certificate can then be
used to sign device certificates, which can be then registered with AWS
IoT. You can register up to 10 CA certificates per AWS account that have
the same subject field and public key. This enables you to have up to 10
certificate authorities sign your device certificates. If you have more
than one CA certificate registered, make sure you pass the CA certificate
when you register your device certificates with the RegisterCertificate
API.
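A sketch of the registration flow (illustrative; assumes this module is
compiled as `AWS.IoT`; the `registrationCode`, `caCertificate`, and
`verificationCertificate` keys follow the AWS IoT REST API):
{:ok, body, _response} = AWS.IoT.get_registration_code(client)
# Sign a verification certificate whose Common Name is body["registrationCode"],
# then register the CA certificate together with it:
AWS.IoT.register_c_a_certificate(client, %{
"caCertificate" => ca_pem,
"verificationCertificate" => verification_pem
})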
"""
def register_c_a_certificate(client, input, options \\ []) do
url = "/cacertificate"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Registers a device certificate with AWS IoT. If you have more than one CA
certificate that has the same subject field, you must specify the CA
certificate that was used to sign the device certificate being registered.
"""
def register_certificate(client, input, options \\ []) do
url = "/certificate/register"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Rejects a pending certificate transfer. After AWS IoT rejects a certificate
transfer, the certificate status changes from **PENDING_TRANSFER** to
**INACTIVE**.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
This operation can only be called by the transfer destination. After it is
called, the certificate will be returned to the source's account in the
INACTIVE state.
"""
def reject_certificate_transfer(client, certificate_id, input, options \\ []) do
url = "/reject-certificate-transfer/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Replaces the specified rule. You must specify all parameters for the new
rule. Creating rules is an administrator-level action. Any user who has
permission to create rules will be able to access data processed by the
rule.
"""
def replace_topic_rule(client, rule_name, input, options \\ []) do
url = "/rules/#{URI.encode(rule_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Sets the specified version of the specified policy as the policy's default
(operative) version. This action affects all certificates to which the
policy is attached. To list the principals the policy is attached to, use
the `ListPrincipalPolicies` API.
"""
def set_default_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
url = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Sets the logging options.
"""
def set_logging_options(client, input, options \\ []) do
url = "/loggingOptions"
headers = []
request(client, :post, url, headers, input, options, nil)
end
@doc """
Transfers the specified certificate to the specified AWS account.
You can cancel the transfer until it is acknowledged by the recipient.
No notification is sent to the transfer destination's account. It is up to
the caller to notify the transfer target.
The certificate being transferred must not be in the ACTIVE state. You can
use the UpdateCertificate API to deactivate it.
The certificate must not have any policies attached to it. You can use the
DetachPrincipalPolicy API to detach them.
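A sketch of the required preparation (illustrative; assumes this module is
compiled as `AWS.IoT`; the `newStatus` and `targetAwsAccount` keys are
assumptions here):
AWS.IoT.update_certificate(client, certificate_id, %{"newStatus" => "INACTIVE"})
AWS.IoT.detach_principal_policy(client, policy_name, %{"principal" => certificate_arn})
AWS.IoT.transfer_certificate(client, certificate_id, %{"targetAwsAccount" => "123456789012"})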
"""
def transfer_certificate(client, certificate_id, input, options \\ []) do
url = "/transfer-certificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
@doc """
Updates a registered CA certificate.
"""
def update_c_a_certificate(client, certificate_id, input, options \\ []) do
url = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates the status of the specified certificate. This operation is
idempotent.
Moving a certificate from the ACTIVE state (including REVOKED) will not
disconnect currently connected devices, but these devices will be unable to
reconnect.
The ACTIVE state is required to authenticate devices connecting to AWS IoT
using a certificate.
"""
def update_certificate(client, certificate_id, input, options \\ []) do
url = "/certificates/#{URI.encode(certificate_id)}"
headers = []
request(client, :put, url, headers, input, options, nil)
end
@doc """
Updates the data for a thing.
"""
def update_thing(client, thing_name, input, options \\ []) do
url = "/things/#{URI.encode(thing_name)}"
headers = []
request(client, :patch, url, headers, input, options, nil)
end
defp request(client, method, url, headers, input, options, success_status_code) do
client = %{client | service: "execute-api"}
host = get_host("iot", client)
url = get_url(host, url, client)
headers = Enum.concat([{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"}],
headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, response=%HTTPoison.Response{status_code: 202, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, response=%HTTPoison.Response{status_code: 204, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
reason = Poison.Parser.parse!(body)["message"]
{:error, reason}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
reason = Poison.Parser.parse!(body)["message"]
{:error, reason}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, url, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{url}/"
end
defp encode_payload(input) do
if input != nil do
Poison.Encoder.encode(input, [])
else
""
end
end
end
|
lib/aws/iot.ex
| 0.757301
| 0.52208
|
iot.ex
|
starcoder
|
defmodule ShEx.ShExJ.Decoder do
@moduledoc false
import ShEx.Utils
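# Usage sketch (illustrative; assumes `json` holds a ShExJ document string):
#
#     ShEx.ShExJ.Decoder.decode(json, base: RDF.iri("http://example.com/"))
#
# The optional `:base` is used by `to_iri/2` below to resolve relative IRIs.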
def decode(content, options \\ []) do
with {:ok, json_object} <- parse_json(content, options) do
to_schema(json_object, options)
end
end
defp to_schema(%{type: "Schema"} = schema, options) do
with {:ok, shapes} <-
schema
|> Map.get(:shapes, [])
|> map(&to_shape_expression/2, options)
|> empty_to_nil(),
{:ok, start} <-
schema
|> Map.get(:start)
|> if_present(&to_shape_expression/2, options),
{:ok, imports} <-
schema
|> Map.get(:imports, [])
|> map(&to_import/2, options)
|> empty_to_nil(),
{:ok, start_acts} <-
schema
|> Map.get(:startActs, [])
|> map(&to_semantic_action/2, options)
|> empty_to_nil() do
ShEx.Schema.new(shapes, start, imports, start_acts)
end
end
defp to_shape_expression(%{type: "NodeConstraint"} = node_constraint, options) do
with {:ok, id} <-
node_constraint
|> Map.get(:id)
|> if_present(&to_shape_expression_label/2, options),
{:ok, node_kind} <-
node_constraint
|> Map.get(:nodeKind)
|> if_present(&to_node_kind/2, options),
{:ok, datatype} <-
node_constraint
|> Map.get(:datatype)
|> if_present(&to_iri/2, options),
string_facets <-
ShEx.NodeConstraint.StringFacets.new(node_constraint),
numeric_facets <-
ShEx.NodeConstraint.NumericFacets.new(node_constraint),
{:ok, values} <-
node_constraint
|> Map.get(:values, [])
|> map(&to_value_set_value/2, options)
|> empty_to_nil() do
{:ok,
%ShEx.NodeConstraint{
id: id,
node_kind: node_kind,
datatype: datatype,
string_facets: string_facets,
numeric_facets: numeric_facets,
values: ShEx.NodeConstraint.Values.new(values)
}}
end
end
defp to_shape_expression(%{type: "Shape"} = shape, options) do
with {:ok, id} <-
shape
|> Map.get(:id)
|> if_present(&to_shape_expression_label/2, options),
{:ok, expression} <-
shape
|> Map.get(:expression)
|> if_present(&to_triple_expression/2, options),
{:ok, closed} <-
shape
|> Map.get(:closed)
|> if_present(&to_bool/2, options),
{:ok, extra} <-
shape
|> Map.get(:extra, [])
|> map(&to_iri/2, options)
|> empty_to_nil(),
{:ok, sem_acts} <-
shape
|> Map.get(:semActs, [])
|> map(&to_semantic_action/2, options)
|> empty_to_nil(),
{:ok, annotations} <-
shape
|> Map.get(:annotations, [])
|> map(&to_annotation/2, options)
|> empty_to_nil() do
{:ok,
%ShEx.Shape{
id: id,
expression: expression,
closed: closed,
extra: extra,
sem_acts: sem_acts,
annotations: annotations
}}
end
end
defp to_shape_expression(%{type: type} = shape_expression_combinator, options)
when type in ~w[ShapeOr ShapeAnd] do
type_mod = Module.concat([ShEx, type])
with {:ok, id} <-
shape_expression_combinator
|> Map.get(:id)
|> if_present(&to_shape_expression_label/2, options),
{:ok, shape_exprs} <-
shape_expression_combinator
|> Map.get(:shapeExprs, [])
|> map(&to_shape_expression/2, options) do
if length(shape_exprs) >= 2 do
{:ok,
struct(type_mod,
id: id,
shape_exprs: shape_exprs
)}
else
{:error, "Invalid #{type}: must have >= 2 shapeExprs, but has #{length(shape_exprs)}}"}
end
end
end
defp to_shape_expression(%{type: "ShapeNot"} = shape_not, options) do
with {:ok, id} <-
shape_not
|> Map.get(:id)
|> if_present(&to_shape_expression_label/2, options),
{:ok, shape_expr} <-
shape_not
|> Map.get(:shapeExpr, [])
|> to_shape_expression(options) do
{:ok,
%ShEx.ShapeNot{
id: id,
shape_expr: shape_expr
}}
end
end
defp to_shape_expression(%{type: "ShapeExternal"} = shape_external, options) do
with {:ok, id} <-
shape_external
|> Map.get(:id)
|> if_present(&to_shape_expression_label/2, options) do
{:ok, %ShEx.ShapeExternal{id: id}}
end
end
defp to_shape_expression(shape_expr_label, options) when is_binary(shape_expr_label) do
to_shape_expression_label(shape_expr_label, options)
end
defp to_shape_expression(invalid, _) do
{:error, "invalid shape expression: #{inspect(invalid)}}"}
end
defp to_triple_expression(
%{type: "TripleConstraint", predicate: predicate} = triple_constraint,
options
) do
with {:ok, id} <-
triple_constraint
|> Map.get(:id)
|> if_present(&to_triple_expression_label/2, options),
{:ok, value_expr} <-
triple_constraint
|> Map.get(:valueExpr)
|> if_present(&to_shape_expression/2, options),
{:ok, predicate} <-
to_iri(predicate, options),
{:ok, inverse} <-
triple_constraint
|> Map.get(:inverse)
|> if_present(&to_bool/2, options),
{:ok, min} <-
triple_constraint
|> Map.get(:min)
|> if_present(&to_integer/2, options),
{:ok, max} <-
triple_constraint
|> Map.get(:max)
|> if_present(&to_integer/2, options),
{:ok, sem_acts} <-
triple_constraint
|> Map.get(:semActs, [])
|> map(&to_semantic_action/2, options)
|> empty_to_nil(),
{:ok, annotations} <-
triple_constraint
|> Map.get(:annotations, [])
|> map(&to_annotation/2, options)
|> empty_to_nil() do
{:ok,
%ShEx.TripleConstraint{
id: id,
value_expr: value_expr,
predicate: predicate,
inverse: inverse,
min: min,
max: max,
sem_acts: sem_acts,
annotations: annotations
}}
end
end
defp to_triple_expression(%{type: "TripleConstraint"} = invalid, options) do
{:error, "invalid TripleConstraint: #{inspect(invalid)}}"}
end
defp to_triple_expression(%{type: type} = triple_expression_combinator, options)
when type in ~w[EachOf OneOf] do
type_mod = Module.concat([ShEx, type])
with {:ok, id} <-
triple_expression_combinator
|> Map.get(:id)
|> if_present(&to_triple_expression_label/2, options),
{:ok, expressions} <-
triple_expression_combinator
|> Map.get(:expressions, [])
|> map(&to_triple_expression/2, options),
{:ok, min} <-
triple_expression_combinator
|> Map.get(:min)
|> if_present(&to_integer/2, options),
{:ok, max} <-
triple_expression_combinator
|> Map.get(:max)
|> if_present(&to_integer/2, options),
{:ok, sem_acts} <-
triple_expression_combinator
|> Map.get(:semActs, [])
|> map(&to_semantic_action/2, options)
|> empty_to_nil(),
{:ok, annotations} <-
triple_expression_combinator
|> Map.get(:annotations, [])
|> map(&to_annotation/2, options)
|> empty_to_nil() do
if length(expressions) >= 2 do
{:ok,
struct(type_mod,
id: id,
expressions: expressions,
min: min,
max: max,
sem_acts: sem_acts,
annotations: annotations
)}
else
{:error, "Invalid #{type}: must have >= 2 shapeExprs, but has #{length(expressions)}}"}
end
end
end
defp to_triple_expression(triple_expr_ref, options) when is_binary(triple_expr_ref) do
to_triple_expression_label(triple_expr_ref, options)
end
defp to_triple_expression(invalid, _) do
{:error, "invalid triple expression: #{inspect(invalid)}}"}
end
defp to_import(iri, options) when is_binary(iri),
do: to_iri(iri, options)
defp to_import(invalid, _),
do: {:error, "invalid import: #{inspect(invalid)}}"}
defp to_semantic_action(%{type: "SemAct", name: name} = sem_act, options)
when is_binary(name) do
with {:ok, name_iri} <- to_iri(name, options) do
{:ok,
%ShEx.SemAct{
name: name_iri,
code: Map.get(sem_act, :code)
}}
end
end
defp to_semantic_action(%{type: "SemAct"} = invalid, _) do
{:error, "invalid SemAct: #{inspect(invalid)}}"}
end
defp to_annotation(
%{type: "Annotation", predicate: predicate, object: object} = annotation,
options
)
when is_binary(predicate) do
with {:ok, predicate_iri} <- to_iri(predicate, options),
{:ok, object_value} <- to_object_value(object, options) do
{:ok,
%ShEx.Annotation{
predicate: predicate_iri,
object: object_value
}}
end
end
defp to_annotation(%{type: "Annotation"} = invalid, _) do
{:error, "invalid Annotation: #{inspect(invalid)}"}
end
defp to_shape_expression_label("_:" <> bnode, options),
do: {:ok, RDF.bnode(bnode)}
defp to_shape_expression_label(iri, options) when is_binary(iri),
do: to_iri(iri, options)
defp to_shape_expression_label(invalid, _),
do: {:error, "invalid shape expression label: #{inspect(invalid)}}"}
defp to_triple_expression_label("_:" <> bnode, options),
do: {:ok, RDF.bnode(bnode)}
defp to_triple_expression_label(iri, options) when is_binary(iri),
do: to_iri(iri, options)
defp to_triple_expression_label(invalid, _),
do: {:error, "invalid triple expression label: #{inspect(invalid)}}"}
defp to_node_kind(node_kind, _) do
if node_kind in ShEx.NodeConstraint.node_kinds() do
{:ok, node_kind}
else
{:error, "invalid node kind: #{inspect(node_kind)}}"}
end
end
defp to_value_set_value(iri, options) when is_binary(iri) do
to_iri(iri, options)
end
defp to_value_set_value(%{value: _} = literal, options) do
to_literal(literal, options)
end
defp to_value_set_value(%{type: "IriStem", stem: stem} = iri_stem, options) do
with {:ok, iri} <- to_iri(stem, options) do
{:ok, %{iri_stem | stem: iri}}
end
end
defp to_value_set_value(
%{type: "IriStemRange", stem: stem, exclusions: exclusions} = iri_stem_range,
options
) do
with {:ok, iri_or_wildcard} <-
to_iri_or_wildcard(stem, options),
{:ok, exclusion_values} <-
map(exclusions, &to_value_set_value/2, options) do
{:ok, %{iri_stem_range | stem: iri_or_wildcard, exclusions: exclusion_values}}
end
end
defp to_value_set_value(%{type: "Language", languageTag: _} = language, _) do
{:ok, language}
end
defp to_value_set_value(%{type: type} = stem, _) when type in ~w[LanguageStem LiteralStem] do
{:ok, stem}
end
defp to_value_set_value(%{exclusions: _} = stem_range, _options) do
{:ok, stem_range}
end
defp to_value_set_value(invalid, _) do
{:error, "invalid value set value: #{inspect(invalid)}}"}
end
defp to_object_value(iri, options) when is_binary(iri),
do: to_iri(iri, options)
defp to_object_value(%{value: _} = literal, options),
do: to_literal(literal, options)
defp to_object_value(invalid, _),
do: {:error, "invalid object value: #{inspect(invalid)}}"}
defp to_iri_or_wildcard(%{type: "Wildcard"} = wildcard, _), do: {:ok, wildcard}
defp to_iri_or_wildcard(iri, options), do: to_iri(iri, options)
defp to_iri(iri, options) do
cond do
RDF.IRI.absolute?(iri) ->
{:ok, RDF.iri(iri)}
base = Keyword.get(options, :base) ->
{:ok, RDF.IRI.merge(base, iri)}
true ->
{:error, "unresolvable relative IRI '#{iri}', no base iri defined"}
end
end
defp to_literal(%{value: value, type: datatype}, _options),
do: {:ok, RDF.literal(value, datatype: datatype)}
defp to_literal(%{value: value, language: language}, _options),
do: {:ok, RDF.literal(value, language: language)}
defp to_literal(%{value: value}, _options),
do: {:ok, RDF.literal(value)}
defp to_literal(invalid, _),
do: {:error, "invalid literal: #{inspect(invalid)}}"}
defp to_integer(integer, _) when is_integer(integer),
do: {:ok, integer}
defp to_integer(invalid, _),
do: {:error, "invalid integer: #{inspect(invalid)}}"}
defp to_bool(bool, _) when is_boolean(bool),
do: {:ok, bool}
defp to_bool(invalid, _),
do: {:error, "invalid boolean: #{inspect(invalid)}}"}
defp parse_json(content, _opts \\ []) do
Jason.decode(content, keys: :atoms!)
rescue
error in [ArgumentError] ->
case __STACKTRACE__ do
[{:erlang, :binary_to_existing_atom, [bad_property, _], []} | _] ->
{:error, "invalid ShExJ property: #{bad_property}"}
_ ->
reraise error, __STACKTRACE__
end
end
@doc !"Some allowed keys which we don't want to cause to_existing_atom to fail"
def allowed_keys_fix, do: ~w[@context valueExpr languageTag]a
end
|
lib/shex/shexj/decoder.ex
| 0.710829
| 0.46478
|
decoder.ex
|
starcoder
|
defmodule Bank.Ledger do
@moduledoc ~S"""
A simple implementation of double-entry accounting system.
Basically, we store every `Bank.Ledger.Entry` twice - once for each account affected.
Thus, if Alice transfers $10.00 to Bob we'll have two entries:
- debit Alice's account for $10.00
- credit Bob's account for $10.00
`Bank.Ledger.Entry` can be a credit or a debit. Depending on the `Bank.Ledger.Account`'s type,
a credit can result in an increase (or decrease) of that account's balance.
See `balance/1`.
A double-entry accounting system is usually required for
compliance with other financial institutions.
See [Wikipedia entry for more information](https://en.wikipedia.org/wiki/Double-entry_bookkeeping_system#Debits_and_credits)
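A typical transfer writes one balanced pair of entries, e.g. (illustrative
only; the exact entry-tuple layout is defined by `Bank.Ledger.Entry.from_tuple/1`):
Bank.Ledger.write([
{"debit", alice_wallet, "transfer", %Money{cents: 10_00, currency: "USD"}},
{"credit", bob_wallet, "transfer", %Money{cents: 10_00, currency: "USD"}}
])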
"""
use Bank.Model
alias Bank.Ledger.{Account, Entry}
@doc ~S"""
Creates a wallet account for a given `username`.
"""
def create_wallet!(username) do
Account.build_wallet(username)
|> Repo.insert!
end
@doc ~S"""
Returns account's balance as `Money`.
We calculate the balance over all of the account's entries.
Whether the balance increases or decreases is based on the `Bank.Account`'s type
and the `Bank.Entry`'s type according to this table:
|           | Debit    | Credit   |
|-----------|----------|----------|
| Asset     | Increase | Decrease |
| Liability | Decrease | Increase |
"""
def balance(%Account{id: id, type: type, currency: currency}) do
q = from(t in Entry,
select: fragment("SUM(CASE WHEN b0.type = 'credit' THEN (b0.amount).cents ELSE -(b0.amount).cents END)"),
where: t.account_id == ^id)
balance = Repo.one(q) || 0
balance = do_balance(balance, type)
%Money{cents: balance, currency: currency}
end
defp do_balance(balance, "liability"), do: +balance
defp do_balance(balance, "asset"), do: -balance
def deposits_account do
Repo.get_by(Account, name: "Deposits") ||
Repo.insert!(Account.build_asset("Deposits"))
end
def entries(%Account{id: id}) do
Repo.all(from t in Entry, where: t.account_id == ^id)
end
def write(entries) do
Repo.transaction_with_isolation(fn ->
with :ok <- same_currencies(entries),
{:ok, persisted_entries} <- insert(entries),
:ok <- credits_equal_debits(),
:ok <- sufficient_funds(persisted_entries) do
persisted_entries
else
{:error, reason} ->
Repo.rollback(reason)
end
end, level: :serializable)
end
defp same_currencies(entries) do
{_, _, _, %Money{currency: currency}} = hd(entries)
currencies =
Enum.flat_map(entries, fn {_, %Account{currency: a}, _, %Money{currency: b}} -> [a, b] end)
if Enum.uniq(currencies) == [currency] do
:ok
else
{:error, :different_currencies}
end
end
defp insert(entries) do
entries =
Enum.map(entries, fn tuple ->
Entry.from_tuple(tuple)
|> Repo.insert!
end)
{:ok, entries}
end
defp credits_equal_debits do
q = from e in Entry, select: fragment("SUM((b0.amount).cents)")
credits = Repo.one!(from(e in q, where: e.type == "credit"))
debits = Repo.one!(from(e in q, where: e.type == "debit"))
if credits == debits do
:ok
else
{:error, :credits_not_equal_debits}
end
end
defp sufficient_funds(entries) do
accounts = Enum.map(entries, & &1.account)
if Enum.all?(accounts, fn account -> balance(account).cents >= 0 end) do
:ok
else
{:error, :insufficient_funds}
end
end
end
|
apps/bank/lib/bank/ledger.ex
| 0.822474
| 0.668586
|
ledger.ex
|
starcoder
|
defmodule Tint.RGB do
@moduledoc """
A color in the RGB (red, green, blue) colorspace.
"""
import Tint.Utils.Cast
alias Tint.Distance
alias Tint.RGB.HexCode
alias Tint.Utils.Interval
defstruct [:red, :green, :blue]
@type t :: %__MODULE__{
red: non_neg_integer,
green: non_neg_integer,
blue: non_neg_integer
}
@channel_interval Interval.new(0, 255)
@doc false
@spec __channel_interval__() :: Interval.t()
def __channel_interval__, do: @channel_interval
@doc """
Builds a new RGB color from red, green and blue color parts. Please always
use this function to build a new RGB color.
## Examples
iex> Tint.RGB.new(0, 0, 0)
#Tint.RGB<0,0,0 (#000000)>
iex> Tint.RGB.new(255, 127, 30)
#Tint.RGB<255,127,30 (#FF7F1E)>
iex> Tint.RGB.new(256, -1, 0)
** (Tint.OutOfRangeError) Value 256 is out of range [0,255]
"""
@spec new(number | String.t(), number | String.t(), number | String.t()) :: t
def new(red, green, blue) do
%__MODULE__{
red: cast_channel!(red),
green: cast_channel!(green),
blue: cast_channel!(blue)
}
end
defp cast_channel!(channel) do
cast_value_with_interval!(channel, :integer, @channel_interval)
end
@doc """
Builds a new RGB color from the given hex code.
## Examples
iex> Tint.RGB.from_hex("#FF7F1E")
{:ok, %Tint.RGB{red: 255, green: 127, blue: 30}}
iex> Tint.RGB.from_hex("F00")
{:ok, %Tint.RGB{red: 255, green: 0, blue: 0}}
iex> Tint.RGB.from_hex("invalid")
:error
"""
@spec from_hex(String.t()) :: {:ok, t} | :error
def from_hex(code) do
HexCode.parse(code)
end
@doc """
Builds a new RGB color from the given hex code. Raises when the given hex code
is invalid.
## Examples
iex> Tint.RGB.from_hex!("#FF7F1E")
#Tint.RGB<255,127,30 (#FF7F1E)>
iex> Tint.RGB.from_hex!("invalid")
** (ArgumentError) Invalid hex code: invalid
"""
@spec from_hex!(String.t()) :: t | no_return
def from_hex!(code) do
case from_hex(code) do
{:ok, color} -> color
:error -> raise ArgumentError, "Invalid hex code: #{code}"
end
end
@doc """
Builds a new RGB color from red, green and blue color ratios.
## Example
iex> Tint.RGB.from_ratios(1, 0.5, 0)
#Tint.RGB<255,128,0 (#FF8000)>
"""
@spec from_ratios(
number | String.t(),
number | String.t(),
number | String.t()
) :: t
def from_ratios(red_ratio, green_ratio, blue_ratio) do
%__MODULE__{
red: cast_ratio_to_channel!(red_ratio),
green: cast_ratio_to_channel!(green_ratio),
blue: cast_ratio_to_channel!(blue_ratio)
}
end
defp cast_ratio_to_channel!(ratio) do
ratio
|> cast_ratio!()
|> ratio_to_channel()
end
defp ratio_to_channel(ratio) do
round(ratio * @channel_interval.max)
end
@doc """
Converts a tuple containing red, green and blue channel values into a `Tint.RGB`
struct.
## Example
iex> Tint.RGB.from_tuple({255, 127, 30})
#Tint.RGB<255,127,30 (#FF7F1E)>
"""
@spec from_tuple(
{number | String.t(), number | String.t(), number | String.t()}
) :: t
def from_tuple({red, green, blue}) do
new(red, green, blue)
end
@doc """
Converts an RGB color to a hex code.
## Example
iex> Tint.RGB.to_hex(%Tint.RGB{red: 255, green: 127, blue: 30})
"#FF7F1E"
"""
@spec to_hex(t) :: String.t()
def to_hex(%__MODULE__{} = color) do
HexCode.serialize(color)
end
@doc """
Builds a tuple containing the ratios of the red, green and blue components of
a given color.
"""
@spec to_ratios(t) :: {float, float, float}
def to_ratios(%__MODULE__{} = color) do
{channel_to_ratio(color.red), channel_to_ratio(color.green),
channel_to_ratio(color.blue)}
end
defp channel_to_ratio(channel) do
channel / @channel_interval.max
end
@doc """
Converts an RGB color into a tuple containing the red, green and blue channels.
## Example
iex> Tint.RGB.to_tuple(%Tint.RGB{red: 255, green: 127, blue: 30})
{255, 127, 30}
"""
@spec to_tuple(t) :: {non_neg_integer, non_neg_integer, non_neg_integer}
def to_tuple(%__MODULE__{} = color) do
{color.red, color.green, color.blue}
end
# Complementary Color
@doc """
Calculates the complementary of the given RGB color.
## Example
iex> Tint.RGB.complementary_color(%Tint.RGB{red: 255, green: 0, blue: 0})
#Tint.RGB<0,255,255 (#00FFFF)>
"""
@doc since: "1.1.0"
@spec complementary_color(t) :: t
def complementary_color(%__MODULE__{} = color) do
channel_size = @channel_interval.max
red = channel_size - color.red
green = channel_size - color.green
blue = channel_size - color.blue
new(red, green, blue)
end
# Distance
@doc """
Calculates the Euclidean distance of two colors.
## Options
* `:weights` - A tuple defining the weights for the red, green and blue color
channels. Defaults to `{1, 1, 1}`.
"""
@doc since: "0.2.0"
@spec euclidean_distance(Tint.color(), Tint.color(), Keyword.t()) :: float
def euclidean_distance(color, other_color, opts \\ []) do
Distance.distance(color, other_color, {Distance.Euclidean, opts})
end
@doc """
Finds the nearest color for the specified color using the given color palette
and an optional distance algorithm.
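For example (assuming the default Euclidean metric picks the closest palette
entry):
## Example
iex> palette = [Tint.RGB.new(0, 0, 0), Tint.RGB.new(255, 255, 255)]
iex> Tint.RGB.nearest_color(Tint.RGB.new(20, 20, 20), palette)
#Tint.RGB<0,0,0 (#000000)>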
"""
@doc since: "1.0.0"
@spec nearest_color(
Tint.color(),
[Tint.color()],
Distance.distance_algorithm()
) :: nil | Tint.color()
def nearest_color(
color,
palette,
distance_algorithm \\ Distance.Euclidean
) do
Distance.nearest_color(color, palette, distance_algorithm)
end
@doc """
Finds the n nearest colors for the specified color using the given color
palette and an optional distance algorithm.
"""
@doc since: "1.0.0"
@spec nearest_colors(
Tint.color(),
[Tint.color()],
non_neg_integer,
Distance.distance_algorithm()
) :: [Tint.color()]
def nearest_colors(
color,
palette,
n,
distance_algorithm \\ Distance.Euclidean
) do
Distance.nearest_colors(color, palette, n, distance_algorithm)
end
@doc """
Determines whether the given color is a grayscale color, which means
that the red, green and blue channels of the color have the same value.
"""
@doc since: "1.0.0"
@spec grayscale?(t) :: boolean
def grayscale?(color)
def grayscale?(%__MODULE__{red: value, green: value, blue: value}), do: true
def grayscale?(%__MODULE__{}), do: false
@doc """
Determines whether the given color is grayish based on the distance between the
red, green and blue channels of the color.
Additionally, you have to specify a tolerance that defines how far the min and
the max channels may be apart from each other. A tolerance of 0 means that the
color has to be an exact grayscale color. A tolerance of 255 means that any
color is regarded as gray.
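For example, (240, 248, 255) has a channel spread of 15 and so passes a
tolerance of 16, while pure red does not:
## Examples
iex> Tint.RGB.grayish?(Tint.RGB.new(240, 248, 255), 16)
true
iex> Tint.RGB.grayish?(Tint.RGB.new(255, 0, 0), 16)
false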
"""
@spec grayish?(t, non_neg_integer) :: boolean
def grayish?(color, tolerance)
def grayish?(color, 0), do: grayscale?(color)
def grayish?(color, tolerance) do
tolerance = cast_channel!(tolerance)
{min, max} = Enum.min_max([color.red, color.green, color.blue])
max - min <= tolerance
end
defimpl Inspect do
import Inspect.Algebra
alias Tint.RGB
def inspect(color, opts) do
concat([
"#Tint.RGB<",
to_doc(color.red, opts),
",",
to_doc(color.green, opts),
",",
to_doc(color.blue, opts),
" (",
RGB.to_hex(color),
")>"
])
end
end
end
|
lib/tint/rgb.ex
| 0.966992
| 0.433082
|
rgb.ex
|
starcoder
|
defmodule Plotex.ViewRange do
alias __MODULE__
@unix_epoch ~N[1970-01-01 00:00:00]
defstruct start: 10,
stop: 90,
projection: :cartesian
@type t :: %Plotex.ViewRange{start: number(), stop: number(), projection: :cartesian | :polar}
def new({a, b}, proj \\ :cartesian) do
%ViewRange{start: a, stop: b, projection: proj}
end
def empty(proj \\ :cartesian) do
%ViewRange{start: nil, stop: nil, projection: proj}
end
@doc """
Find the maximum and minimum points for a given line of data.
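## Example
iex> Plotex.ViewRange.from([3, 1, 2])
%Plotex.ViewRange{start: 1, stop: 3, projection: :cartesian}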
"""
def from(data, proj \\ :cartesian) do
if Enum.empty?(data) do
%ViewRange{start: nil, stop: nil, projection: proj}
else
{a, b} = Enum.min_max_by(data, &Plotex.ViewRange.convert/1)
%ViewRange{start: a, stop: b, projection: proj}
end
end
def min_max(%{start: nil, stop: nil}, b), do: b
def min_max(a, %{start: nil, stop: nil}), do: a
def min_max(va, vb) do
start! = Enum.min_by([va.start, vb.start], &convert/1)
stop! = Enum.max_by([va.stop, vb.stop], &convert/1)
%ViewRange{start: start!, stop: stop!, projection: va.projection}
end
def convert(nil), do: nil
def convert(%Time{} = val), do: Time.to_erl(val)
def convert(%Date{} = val), do: Date.to_erl(val)
def convert(%DateTime{} = val), do: DateTime.to_unix(val, :nanosecond)
def convert(%NaiveDateTime{} = a), do: NaiveDateTime.diff(a, @unix_epoch, :nanosecond)
def convert(val) when is_number(val), do: val
def to_val(a, units \\ :nanosecond), do: vals(a, units)
def vals(%Date{} = a, _units), do: Date.diff(a, @unix_epoch)
def vals(%DateTime{} = a, units), do: DateTime.to_unix(a, units)
def vals(%NaiveDateTime{} = a, units), do: NaiveDateTime.diff(a, @unix_epoch, units)
def vals(a, _units), do: a
def diff(%DateTime{} = b, %DateTime{} = a), do: DateTime.diff(b, a, :nanosecond)
def diff(%NaiveDateTime{} = b, %NaiveDateTime{} = a), do: NaiveDateTime.diff(b, a, :nanosecond)
def diff(b, a), do: b - a
def pad(%ViewRange{start: start, stop: stop, projection: proj}, _opts)
when is_nil(start) or is_nil(stop) do
%ViewRange{start: nil, stop: nil, projection: proj}
end
def pad(%ViewRange{start: %DateTime{} = start, stop: %DateTime{} = stop} = vr, opts) do
amount = Keyword.get(opts, :padding, 0.05) * ViewRange.dist(vr)
%ViewRange{
start: start |> DateTime.add(-round(amount), :nanosecond),
stop: stop |> DateTime.add(round(amount), :nanosecond)
}
end
def pad(%ViewRange{start: %NaiveDateTime{} = start, stop: %NaiveDateTime{} = stop} = vr, opts) do
amount = Keyword.get(opts, :padding, 0.05) * ViewRange.dist(vr)
%ViewRange{
start: start |> NaiveDateTime.add(-round(amount), :nanosecond),
stop: stop |> NaiveDateTime.add(round(amount), :nanosecond)
}
end
def pad(%ViewRange{start: start, stop: stop, projection: proj} = vr, opts) do
amount = Keyword.get(opts, :padding, 0.05) * ViewRange.dist(vr)
%ViewRange{start: start - amount, stop: stop + amount, projection: proj}
end
def dist({start, stop}) when is_nil(start) or is_nil(stop) do
1.0
end
@type datetime :: DateTime.t() | NaiveDateTime.t()
@spec dist({datetime(), datetime()} | {nil, nil} | ViewRange.t()) :: number
def dist({%{} = start, %{} = stop}) do
diff = to_val(stop) - to_val(start)
if diff != 0 do
diff
else
1_000_000_000
end
end
def dist({start, stop}) do
if stop != start do
stop - start
else
1.0
end
end
def dist(%ViewRange{} = range) do
dist({range.start, range.stop})
end
end
|
lib/plotex/view_range.ex
| 0.782579
| 0.654053
|
view_range.ex
|
starcoder
|
defmodule ExPNG.Image do
alias __MODULE__, as: Image
defstruct width: 0, height: 0, pixels: <<>>
def new(width, height) do
new(width, height, ExPNG.Color.transparent)
end
def new(width, height, background_color) do
%Image{width: width, height: height, pixels: String.duplicate(background_color, width*height)}
end
def from_chunks(chunks) do
header = ExPNG.Chunks.header(chunks)
:ok = ensure_features_supported(header)
image_data = strip_scan_line_filter_byte(ExPNG.Chunks.image_data(chunks), header.width * 4, <<>>)
%Image{width: header.width, height: header.height, pixels: image_data}
end
defp ensure_features_supported(%ExPNG.Chunks.Header{bit_depth: 8, color_type: 6, compression_method: 0, interlace_method: 0}), do: :ok
defp ensure_features_supported(_), do: nil
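# Each PNG scanline is prefixed with a single filter-type byte. We assume the
# simplest case (filter type 0 / "None" on every line), so decoding just drops
# that byte and encoding prepends a zero byte per line.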
defp strip_scan_line_filter_byte(<<>>, _, output), do: output
defp strip_scan_line_filter_byte(image_data, scan_line_width, output) do
<<_, scan_line::binary-size(scan_line_width), next_scan_lines::binary>> = image_data
strip_scan_line_filter_byte(next_scan_lines, scan_line_width, output <> scan_line)
end
defp add_scan_line_filter_byte(<<>>, _, output), do: output
defp add_scan_line_filter_byte(image_data, scan_line_width, output) do
<<scan_line::binary-size(scan_line_width), next_scan_lines::binary>> = image_data
add_scan_line_filter_byte(next_scan_lines, scan_line_width, output <> <<0>> <> scan_line)
end
# This will probably have to change but let's keep it simple for now
def to_chunks(image) do
{to_header_chunk(image), add_scan_line_filter_byte(image.pixels, image.width * 4, <<>>)}
end
def to_header_chunk(image) do
%ExPNG.Chunks.Header{
width: image.width,
height: image.height,
bit_depth: 8,
color_type: 6,
compression_method: 0,
interlace_method: 0,
filter_method: 0,
}
end
def size(%Image{width: w, height: h}) do
{w, h}
end
# FIXME this is probably incredibly inefficient
def set_pixel(image, x, y, color) do
offset = pixel_offset(image, x, y)
<<before::binary-size(offset), _::binary-size(4), rest::binary>> = image.pixels
%Image{image | pixels: (before <> color <> rest)}
end
def get_pixel(image, x, y) do
offset = pixel_offset(image, x, y)
<<_::binary-size(offset), pixel::binary-size(4), _::binary >> = image.pixels
pixel
end
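# Pixels are stored row-major as 4-byte RGBA values, so the byte offset of
# (x, y) is (y * width + x) * 4.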
defp pixel_offset(image, x, y) do
(y * image.width + x) * 4
end
end
|
lib/ex_png/image.ex
| 0.502197
| 0.489198
|
image.ex
|
starcoder
|
defmodule Grizzly.CommandClass.Powerlevel.Set do
@moduledoc """
Command module for working with the Powerlevel command class SET command.
It is used to temporarily decrease the RF transmit power level of a device.
Command Options:
* `:power_level` - By how much to decrease the RF transmit power level (:normal_power is no decrease): :normal_power | :minus1dBm | :minus2dBm | :minus3dBm | :minus4dBm | :minus5dBm | :minus6dBm | :minus7dBm | :minus8dBm | :minus9dBm
* `:timeout` - The number of seconds during which RF transmit power level is decreased (it is then returned to normal power automatically).
* `:seq_number` - The sequence number of the Z/IP Packet
* `:retries` - The number of times to try to send the command (default 2)
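Example (illustrative values; `seq_number` is normally assigned by Grizzly):
{:ok, command} = Grizzly.CommandClass.Powerlevel.Set.init(
power_level: :minus2dBm, timeout: 30, seq_number: 0x01)
{:ok, binary} = Grizzly.CommandClass.Powerlevel.Set.encode(command)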
"""
@behaviour Grizzly.Command
alias Grizzly.Packet
alias Grizzly.Command.{EncodeError, Encoding}
alias Grizzly.CommandClass.Powerlevel
@type t :: %__MODULE__{
seq_number: Grizzly.seq_number(),
retries: non_neg_integer(),
# By how much to decrease the RF transmit power level (:normal_power is no decrease)
# :normal_power | :minus1dBm | :minus2dBm | :minus3dBm | :minus4dBm | :minus5dBm | :minus6dBm | :minus7dBm | :minus8dBm | :minus9dBm
power_level: Powerlevel.power_level_description(),
# The number of seconds during which RF transmit power level is decreased. It returns to normal power automatically after that.
timeout: non_neg_integer
}
@type opt ::
{:seq_number, Grizzly.seq_number()}
| {:retries, non_neg_integer()}
| {:power_level, Powerlevel.power_level_description()}
| {:timeout, non_neg_integer()}
@enforce_keys [:power_level, :timeout]
defstruct seq_number: nil, retries: 2, power_level: nil, timeout: nil
@spec init([opt]) :: {:ok, t}
def init(opts) do
{:ok, struct(__MODULE__, opts)}
end
@spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
def encode(
%__MODULE__{
seq_number: seq_number,
power_level: _power_level,
timeout: timeout
} = command
) do
with {:ok, encoded} <-
Encoding.encode_and_validate_args(
command,
%{
power_level: {:encode_with, Powerlevel, :encode_power_level},
timeout: :byte
}
) do
binary =
Packet.header(seq_number) <>
<<0x73, 0x01, encoded.power_level, timeout>>
{:ok, binary}
end
end
@spec handle_response(t, Packet.t()) ::
{:continue, t}
| {:done, :ok}
| {:done, {:error, :nack_response}}
| {:done, Powerlevel.power_level_report()}
| {:queued, t}
| {:retry, t}
def handle_response(%__MODULE__{seq_number: seq_number} = _command, %Packet{
seq_number: seq_number,
types: [:ack_response]
}) do
{:done, :ok}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: 0}, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:done, {:error, :nack_response}}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: n} = command, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:retry, %{command | retries: n - 1}}
end
def handle_response(
_,
%Packet{body: %{command_class: :powerlevel, command: :power_level_report, value: value}}
) do
{:done, {:ok, value}}
end
def handle_response(
%__MODULE__{seq_number: seq_number} = command,
%Packet{
seq_number: seq_number,
types: [:nack_response, :nack_waiting]
} = packet
) do
if Packet.sleeping_delay?(packet) do
{:queued, command}
else
{:continue, command}
end
end
def handle_response(command, _), do: {:continue, command}
end
|
lib/grizzly/command_class/powerlevel/set.ex
| 0.84241
| 0.556159
|
set.ex
|
starcoder
|
defmodule Grizzly.Security do
@moduledoc """
Helpers for security
"""
import Bitwise
@type key :: :s2_unauthenticated | :s2_authenticated | :s2_access_control | :s0
@type key_byte :: 0x01 | 0x02 | 0x04 | 0x80
@typedoc """
Possible key exchange failures
- `:none` - Bootstrapping was successful
- `:key` - No match between requested and granted keys
- `:scheme` - no scheme is supported by the controller or joining node
- `:curves` - no ECDH curve is supported by both the controller and the joining node
- `:decrypt` - joining node failed to decrypt the input pin from the value. Wrong input value/DSK from user
- `:cancel` - user has canceled the S2 bootstrapping
- `:auth` - the echo kex change frame does not match the earlier exchanged frame
- `:get` - the joining node requested a key that was not granted by the controller at an earlier stage
- `:verify` - the joining node cannot verify and decrypt the exchanged key
- `:report` - the including node transmitted a frame containing a different key than what was currently being exchanged
"""
@type key_exchange_fail_type ::
:none | :key | :scheme | :curves | :decrypt | :cancel | :auth | :get | :verify | :report
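# Example: 0x81 = 0b1000_0001 sets the S0 and S2 Unauthenticated bits, so
# byte_to_keys(0x81) == [:s0, :s2_unauthenticated] and
# keys_to_byte([:s0, :s2_unauthenticated]) == 0x81.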
@spec byte_to_keys(byte) :: [key]
def byte_to_keys(granted_keys_byte) do
<<s0::size(1), _::size(4), ac::size(1), auth::size(1), unauth::size(1)>> =
<<granted_keys_byte>>
keys = [s0: s0, ac: ac, auth: auth, unauth: unauth]
Enum.reduce(keys, [], fn
{:s0, 1}, acc -> acc ++ [:s0]
{:ac, 1}, acc -> acc ++ [:s2_access_control]
{:auth, 1}, acc -> acc ++ [:s2_authenticated]
{:unauth, 1}, acc -> acc ++ [:s2_unauthenticated]
{_, 0}, acc -> acc
end)
end
@spec keys_to_byte([key]) :: byte
def keys_to_byte(keys) do
Enum.reduce(keys, 0, fn key, byte -> byte ||| key_byte(key) end)
end
@doc """
Validate the user input pin length; it should be a 16-bit number (0-65535)
"""
@spec validate_user_input_pin_length(non_neg_integer()) :: :valid | :invalid
def validate_user_input_pin_length(n) when n >= 0 and n <= 65535, do: :valid
def validate_user_input_pin_length(_), do: :invalid
@doc """
Decode a byte representation of the key exchange failed type
"""
@spec failed_type_from_byte(byte()) :: key_exchange_fail_type() | :unk
def failed_type_from_byte(0x00), do: :none
def failed_type_from_byte(0x01), do: :key
def failed_type_from_byte(0x02), do: :scheme
def failed_type_from_byte(0x03), do: :curves
def failed_type_from_byte(0x05), do: :decrypt
def failed_type_from_byte(0x06), do: :cancel
def failed_type_from_byte(0x07), do: :auth
def failed_type_from_byte(0x08), do: :get
def failed_type_from_byte(0x09), do: :verify
def failed_type_from_byte(0x0A), do: :report
def failed_type_from_byte(_), do: :unk
@doc """
Get the byte representation of a key.
The key `:none` is an invalid key to encode to,
so this function does not support encoding to that
key.
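For example, `:s0` encodes to `0x80`:
## Example
iex> Grizzly.Security.key_byte(:s0)
128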
"""
@spec key_byte(key) :: key_byte()
def key_byte(:s0), do: 0x80
def key_byte(:s2_access_control), do: 0x04
def key_byte(:s2_authenticated), do: 0x02
def key_byte(:s2_unauthenticated), do: 0x01
@doc """
Gets the highest security level key from a key list.
Since Z-Wave will work at the highest S2 security group
available on a node, if multiple groups are in a list of keys
it will assume the highest level is the security level of the node
that provided this list.
If the node has S0 security, Z-Wave will respond with granted keys
where the lone key is S0.
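For example:
## Example
iex> Grizzly.Security.get_highest_level([:s0, :s2_unauthenticated, :s2_authenticated])
:s2_authenticated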
"""
@spec get_highest_level([key]) :: key | :none
def get_highest_level([]), do: :none
def get_highest_level([:s0]), do: :s0
def get_highest_level(keys) do
Enum.reduce(keys, fn
:s2_access_control, _ ->
:s2_access_control
:s2_authenticated, last_highest when last_highest != :s2_access_control ->
:s2_authenticated
:s2_unauthenticated, last_highest
when last_highest not in [:s2_authenticated, :s2_access_control] ->
:s2_unauthenticated
:s2_unauthenticated, :s2_authenticated ->
:s2_authenticated
:s2_unauthenticated, :s2_access_control ->
:s2_access_control
_, last_highest ->
last_highest
end)
end
end
|
lib/grizzly/security.ex
| 0.853027
| 0.523968
|
security.ex
|
starcoder
|
defmodule EctoTestDSL.Variants.PhoenixClassic.Insert do
use EctoTestDSL.Drink.Me
alias T.Variants.PhoenixClassic.Insert, as: ThisVariant
alias T.Parse.Start
alias T.Parse.Callbacks
import FlowAssertions.Define.BodyParts
alias T.Variants.Common.DefaultFunctions
# ------------------- Workflows -----------------------------------------
use T.Run.Steps
def workflows() do
common = [
:repo_setup,
:existing_ids,
:params,
:try_params_insertion,
]
%{
error: common ++ [
[:error_content, uses: [:try_params_insertion]],
[:refute_valid_changeset, uses: [:error_content]],
[:example_specific_changeset_checks, uses: [:error_content]],
[:as_cast_changeset_checks, uses: [:error_content]],
:assert_no_insertion,
:postcheck
],
success: common ++ [
[:ok_content, uses: [:try_params_insertion]],
[:check_against_given_fields, uses: [:ok_content]],
[:check_against_earlier_example, uses: [:ok_content]],
[:as_cast_field_checks, uses: [:ok_content]],
[:assert_id_inserted, uses: [:ok_content]],
:postcheck,
]
}
end
# ------------------- Startup -----------------------------------------
def start(opts) do
opts = Keyword.merge(default_start_opts(), opts)
Start.start_with_variant(ThisVariant, opts)
end
defp default_start_opts, do: [
insert_with: &DefaultFunctions.plain_insert/2,
existing_ids_with: &DefaultFunctions.existing_ids/1,
format: :phoenix,
usually_ignore: [],
]
# ------------------- Hook functions -----------------------------------------
def hook(:start, top_level, []) do
assert_valid_keys(top_level)
top_level
end
def hook(:workflow, top_level, [workflow_name]) do
assert_valid_workflow_name(workflow_name)
top_level
end
defp assert_valid_keys(top_level) do
required_keys = [:schema, :examples_module, :repo] ++ Keyword.keys(default_start_opts())
optional_keys = []
top_level
|> Callbacks.validate_top_level_keys(required_keys, optional_keys)
end
defp assert_valid_workflow_name(workflow_name) do
workflows = Map.keys(workflows())
elaborate_assert(
workflow_name in workflows,
"The PhoenixClassic.Insert variant only allows these workflows: #{inspect workflows}",
left: workflow_name
)
end
# ----------------------------------------------------------------------------
defmacro __using__(_) do
quote do
use EctoTestDSL.Predefines
alias EctoTestDSL.Variants.PhoenixClassic
alias __MODULE__, as: ExamplesModule
def start(opts) do
PhoenixClassic.Insert.start([{:examples_module, ExamplesModule} | opts])
end
defmodule Tester do
use EctoTestDSL.Predefines.Tester
alias T.Run.Steps
end
end
end
end
|
lib/variants/phoenix_classic_insert.ex
| 0.532182
| 0.443239
|
phoenix_classic_insert.ex
|
starcoder
|