code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Stripe.Card do
  @moduledoc """
  Work with Stripe card objects.

  Supported operations:
  - Create a card
  - Retrieve a card
  - Update a card
  - Delete a card

  All requests require `owner_type` and `owner_id` parameters to be specified.

  `owner_type` must be one of the following:
    * `:customer`,
    * `:recipient`.

  `owner_id` must be the ID of the owning object.

  This module does not yet support managed accounts.
  Does not yet render lists or take options.

  Recipients may be deprecated for your version of the API. They have
  been replaced by managed accounts (see
  https://stripe.com/docs/connect/managed-accounts), which you should use
  if you're creating a new platform.

  Stripe API reference: https://stripe.com/docs/api#cards
  """

  @type t :: %__MODULE__{}
  @type source :: :customer | :recipient

  defstruct [
    :id, :object,
    :address_city, :address_country, :address_line1,
    :address_line1_check, :address_line2, :address_state,
    :address_zip, :address_zip_check, :brand, :country,
    :customer, :cvc_check, :dynamic_last4, :exp_month, :exp_year,
    :fingerprint, :funding, :last4, :metadata, :name, :recipient,
    :tokenization_method
  ]

  # Which operations each attribute participates in; consumed by Stripe.Request
  # to whitelist request parameters.
  @schema %{
    account: [:retrieve],
    address_city: [:retrieve, :update],
    address_country: [:retrieve, :update],
    address_line1: [:retrieve, :update],
    address_line1_check: [:retrieve],
    address_line2: [:retrieve, :update],
    address_state: [:retrieve, :update],
    address_zip: [:retrieve, :update],
    address_zip_check: [:retrieve],
    brand: [:retrieve, :update],
    country: [:retrieve, :update],
    currency: [:retrieve, :update],
    customer: [:retrieve, :update],
    cvc_check: [:retrieve, :update],
    default_for_currency: [:create, :retrieve, :update],
    dynamic_last4: [:retrieve],
    exp_month: [:retrieve, :update],
    exp_year: [:retrieve, :update],
    external_account: [:create],
    fingerprint: [:retrieve],
    funding: [:retrieve],
    id: [:retrieve],
    last4: [:retrieve],
    metadata: [:create, :retrieve, :update],
    name: [:retrieve, :update],
    object: [:retrieve],
    recipient: [:retrieve],
    source: [:create],
    three_d_secure: [:retrieve],
    tokenization_method: [:retrieve]
  }

  # No attributes may be unset (sent as null) on update.
  @nullable_keys []

  # Maps an owner type to the REST collection that holds its cards.
  defp endpoint_for_owner(owner_type, owner_id) do
    case owner_type do
      :customer -> "customers/#{owner_id}/sources"
      :account -> "accounts/#{owner_id}/external_accounts"
      :recipient -> "recipients/#{owner_id}/cards" # Deprecated
    end
  end

  # Endpoint for a single card belonging to the given owner.
  defp card_endpoint(owner_type, owner_id, card_id),
    do: "#{endpoint_for_owner(owner_type, owner_id)}/#{card_id}"

  @doc """
  Create a card.

  This requires a `token` created by a library like Stripe.js.
  For PCI compliance reasons you should not send a card's number or CVC
  to your own server.

  If you want to create a card with your server without a token, you
  can use the low-level API.
  """
  @spec create(source, String.t, String.t, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def create(owner_type, owner_id, token, opts \\ []) do
    body = to_create_body(owner_type, token)

    owner_type
    |> endpoint_for_owner(owner_id)
    |> Stripe.Request.create(body, @schema, opts)
  end

  # The request key carrying the token differs per owner type.
  @spec to_create_body(source, String.t) :: map
  defp to_create_body(owner_type, token) do
    case owner_type do
      :customer -> %{source: token}
      :recipient -> %{external_account: token}
    end
  end

  @doc """
  Retrieve a card.
  """
  @spec retrieve(source, String.t, String.t, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def retrieve(owner_type, owner_id, card_id, opts \\ []) do
    owner_type
    |> card_endpoint(owner_id, card_id)
    |> Stripe.Request.retrieve(opts)
  end

  @doc """
  Update a card.

  Takes the `id` and a map of changes.
  """
  @spec update(source, String.t, String.t, map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
  def update(owner_type, owner_id, card_id, changes, opts \\ []) do
    owner_type
    |> card_endpoint(owner_id, card_id)
    |> Stripe.Request.update(changes, @schema, @nullable_keys, opts)
  end

  @doc """
  Delete a card.
  """
  @spec delete(source, String.t, String.t, Keyword.t) :: :ok | {:error, Stripe.api_error_struct}
  def delete(owner_type, owner_id, card_id, opts \\ []) do
    owner_type
    |> card_endpoint(owner_id, card_id)
    |> Stripe.Request.delete(%{}, opts)
  end

  @doc """
  List all cards.
  """
  @spec list(source, String.t, map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
  def list(owner_type, owner_id, params \\ %{}, opts \\ []) do
    endpoint = endpoint_for_owner(owner_type, owner_id)
    params = Map.merge(params, %{"object" => "card"})
    # NOTE(review): list retrieval passes params first (Stripe.Request.retrieve/3),
    # while single-object calls use retrieve/2 — confirm against Stripe.Request.
    Stripe.Request.retrieve(params, endpoint, opts)
  end
end
|
lib/stripe/card.ex
| 0.815747
| 0.539469
|
card.ex
|
starcoder
|
defmodule HeBroker.RouteMap do
  @moduledoc """
  This module implements a method of mapping consumer services to their
  subscribed topics so publishers can properly broadcast messages.

  Each route is composed by a topic (the string that will be used to define which
  _services_ you want to send your message to. eg: "user", "user:create", "email:send")
  and a collection of _services_. Those _services_ are the consumers of
  the messages sent to the specified topic. The services are composed by a _cast_
  function (a function to be executed when the publisher wants to send a message
  to the topic without expecting a return), a _call_ function (a function to be
  executed when the publisher wants to send a message to the topic expecting one
  response) and a pool of processes that can consume those messages using the
  specified functions.

  The data representing the routemap should be considered opaque, any code that
  assumes any knowledge about the structure of the routemap is prone to fail since
  this might change in the future.
  """

  alias HeBroker.RouteMap.Service
  alias HeBroker.Request

  @type partial :: ((topic, message :: any, Request.t) -> Service.call_return | Service.cast_return)
  @type topic :: String.t
  @type service :: Service.t
  @opaque t :: :ets.tid()

  # FIX: the spec previously was `new() :: t`, which did not cover the
  # `new/1` head introduced by the default argument.
  @spec new(Keyword.t) :: t
  @doc """
  Returns a new route map.

  Accepts `:ets.new/2` options; an optional `:name` entry selects the table
  name (defaults to `:hebroker`) and is stripped from the options passed on.
  """
  def new(params \\ []) do
    opts = Enum.reject(params, &match?({:name, _}, &1))
    :ets.new(params[:name] || :hebroker, opts)
  end

  @spec callback(Service.t, :call | :cast) :: partial | nil
  @doc """
  Returns a partial based on the service's callback.

  If the service doesn't provide a proper callback, `nil` is returned.
  """
  def callback(%Service{call: nil}, :call),
    do: nil
  def callback(%Service{cast: nil}, :cast),
    do: nil
  def callback(%Service{cast: cast, pool: pool}, :cast),
    do: build_partial(cast, pool)
  def callback(%Service{call: call, pool: pool}, :call),
    do: build_partial(call, pool)

  # Wraps `function` in a partial providing the head pid from `pool`,
  # returning a 3-arity callback (topic, message, request).
  # (Was documented with a fake `@docp` attribute; a plain comment avoids
  # storing an unused module attribute.)
  @spec build_partial(Service.cast | Service.call, Service.pool) :: partial
  defp build_partial(function, pool) do
    pid = Service.pool_out(pool)

    fn topic, message, request ->
      function.(pid, topic, message, request)
    end
  end

  @spec services_on_topic(t, topic) :: [service]
  @doc """
  Returns the services on `topic` on `routemap`.

  Returns `[]` when the topic is unknown.
  """
  def services_on_topic(routemap, topic) do
    case :ets.lookup(routemap, topic) do
      [{^topic, services}] ->
        services
      [] ->
        []
    end
  end

  # FIX: spec previously claimed `no_return`, but the function does return —
  # its value is the result of `:ets.insert/2` (`true`).
  @spec upsert_topic(t, topic, pid, Service.cast, Service.call) :: boolean
  @doc """
  Upserts the service on the `routemap` putting it on the specified `topic`
  using its defined `cast` and `call` functions.

  If the topic does not exist in the routemap, it will be created.
  If the service is already on the topic in the routemap, `pid` will be added to
  its pool.

  Returns the raw `:ets.insert/2` result (`true`); callers should not rely on it.
  """
  def upsert_topic(routemap, topic, pid, cast, call) do
    # Reuse the public lookup instead of duplicating the :ets.lookup case.
    services = services_on_topic(routemap, topic)

    updated_services =
      services
      |> Enum.reduce({[], false}, fn
        # First service with matching callbacks: grow its pool and mark done.
        service = %Service{cast: ^cast, call: ^call}, {acc, false} ->
          s = %Service{service | pool: Service.pool_in(service.pool, pid)}
          {[s | acc], true}
        s, {acc, status} ->
          {[s | acc], status}
      end)
      |> case do
        {services, true} ->
          services
        # No matching service existed: prepend a brand-new one.
        {services, false} ->
          s = %Service{cast: cast, call: call, pool: Service.pool_new(pid)}
          [s | services]
      end

    :ets.insert(routemap, {topic, updated_services})
  end

  # FIX: spec previously claimed `no_return`; the function returns either the
  # `:ets.insert/2`/`:ets.delete/2` result (`true`) or `:ok` for unknown topics.
  @spec remove_consumer(t, topic, consumer :: pid) :: :ok | boolean
  @doc """
  Removes the specified `pid` from the topic.

  Might remove the whole `topic` from the `routemap` if `pid` is the only
  subscribed consumer.
  """
  def remove_consumer(routemap, topic, pid) do
    case :ets.lookup(routemap, topic) do
      [{^topic, services}] ->
        services
        |> Enum.reduce([], fn service = %Service{pool: pool}, acc ->
          if Service.pool_member?(pool, pid) do
            pids2 = Service.pool_delete(pool, pid)

            if Service.pool_empty?(pids2) do
              # Pid was the only element of the pool and the service now doesn't
              # have any enabled consumer and thus must be removed from the topic
              acc
            else
              # Pid is removed from the pool and the pool isn't empty
              [%{service | pool: pids2} | acc]
            end
          else
            [service | acc]
          end
        end)
        |> case do
          [] ->
            # By removing the consumer, the topic doesn't have any subscribed
            # service consumer anymore
            :ets.delete(routemap, topic)
          services ->
            :ets.insert(routemap, {topic, services})
        end
      [] ->
        :ok
    end
  end
end
|
lib/he_broker/route_map.ex
| 0.849815
| 0.55911
|
route_map.ex
|
starcoder
|
defmodule AWS.Route53Domains do
@moduledoc """
Amazon Route 53 API actions let you register domain names and perform
related operations.
"""
# NOTE(review): this module looks auto-generated (aws-elixir style): every
# public function is a thin wrapper over request/4 with the action name.
@doc """
Accepts the transfer of a domain from another AWS account to the current
AWS account. You initiate a transfer between AWS accounts using
[TransferDomainToAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_TransferDomainToAnotherAwsAccount.html).
Use either
[ListOperations](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ListOperations.html)
or
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
to determine whether the operation succeeded.
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
provides additional information, for example, `Domain Transfer from Aws
Account 111122223333 has been cancelled`.
"""
def accept_domain_transfer_from_another_aws_account(client, input, options \\ []) do
request(client, "AcceptDomainTransferFromAnotherAwsAccount", input, options)
end
@doc """
Cancels the transfer of a domain from the current AWS account to another
AWS account. You initiate a transfer between AWS accounts using
[TransferDomainToAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_TransferDomainToAnotherAwsAccount.html).
<important> You must cancel the transfer before the other AWS account
accepts the transfer using
[AcceptDomainTransferFromAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_AcceptDomainTransferFromAnotherAwsAccount.html).
</important> Use either
[ListOperations](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ListOperations.html)
or
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
to determine whether the operation succeeded.
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
provides additional information, for example, `Domain Transfer from Aws
Account 111122223333 has been cancelled`.
"""
def cancel_domain_transfer_to_another_aws_account(client, input, options \\ []) do
request(client, "CancelDomainTransferToAnotherAwsAccount", input, options)
end
@doc """
This operation checks the availability of one domain name. Note that if the
availability status of a domain is pending, you must submit another request
to determine the availability of the domain name.
"""
def check_domain_availability(client, input, options \\ []) do
request(client, "CheckDomainAvailability", input, options)
end
@doc """
Checks whether a domain name can be transferred to Amazon Route 53.
"""
def check_domain_transferability(client, input, options \\ []) do
request(client, "CheckDomainTransferability", input, options)
end
@doc """
This operation deletes the specified tags for a domain.
All tag operations are eventually consistent; subsequent operations might
not immediately represent all issued operations.
"""
def delete_tags_for_domain(client, input, options \\ []) do
request(client, "DeleteTagsForDomain", input, options)
end
@doc """
This operation disables automatic renewal of domain registration for the
specified domain.
"""
def disable_domain_auto_renew(client, input, options \\ []) do
request(client, "DisableDomainAutoRenew", input, options)
end
@doc """
This operation removes the transfer lock on the domain (specifically the
`clientTransferProhibited` status) to allow domain transfers. We recommend
you refrain from performing this action unless you intend to transfer the
domain to a different registrar. Successful submission returns an operation
ID that you can use to track the progress and completion of the action. If
the request is not completed successfully, the domain registrant will be
notified by email.
"""
def disable_domain_transfer_lock(client, input, options \\ []) do
request(client, "DisableDomainTransferLock", input, options)
end
@doc """
This operation configures Amazon Route 53 to automatically renew the
specified domain before the domain registration expires. The cost of
renewing your domain registration is billed to your AWS account.
The period during which you can renew a domain name varies by TLD. For a
list of TLDs and their renewal policies, see [Domains That You Can Register
with Amazon Route
53](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/registrar-tld-list.html)
in the *Amazon Route 53 Developer Guide*. Route 53 requires that you renew
before the end of the renewal period so we can complete processing before
the deadline.
"""
def enable_domain_auto_renew(client, input, options \\ []) do
request(client, "EnableDomainAutoRenew", input, options)
end
@doc """
This operation sets the transfer lock on the domain (specifically the
`clientTransferProhibited` status) to prevent domain transfers. Successful
submission returns an operation ID that you can use to track the progress
and completion of the action. If the request is not completed successfully,
the domain registrant will be notified by email.
"""
def enable_domain_transfer_lock(client, input, options \\ []) do
request(client, "EnableDomainTransferLock", input, options)
end
@doc """
For operations that require confirmation that the email address for the
registrant contact is valid, such as registering a new domain, this
operation returns information about whether the registrant contact has
responded.
If you want us to resend the email, use the
`ResendContactReachabilityEmail` operation.
"""
def get_contact_reachability_status(client, input, options \\ []) do
request(client, "GetContactReachabilityStatus", input, options)
end
@doc """
This operation returns detailed information about a specified domain that
is associated with the current AWS account. Contact information for the
domain is also returned as part of the output.
"""
def get_domain_detail(client, input, options \\ []) do
request(client, "GetDomainDetail", input, options)
end
@doc """
The GetDomainSuggestions operation returns a list of suggested domain
names.
"""
def get_domain_suggestions(client, input, options \\ []) do
request(client, "GetDomainSuggestions", input, options)
end
@doc """
This operation returns the current status of an operation that is not
completed.
"""
def get_operation_detail(client, input, options \\ []) do
request(client, "GetOperationDetail", input, options)
end
@doc """
This operation returns all the domain names registered with Amazon Route 53
for the current AWS account.
"""
def list_domains(client, input, options \\ []) do
request(client, "ListDomains", input, options)
end
@doc """
Returns information about all of the operations that return an operation ID
and that have ever been performed on domains that were registered by the
current account.
"""
def list_operations(client, input, options \\ []) do
request(client, "ListOperations", input, options)
end
@doc """
This operation returns all of the tags that are associated with the
specified domain.
All tag operations are eventually consistent; subsequent operations might
not immediately represent all issued operations.
"""
def list_tags_for_domain(client, input, options \\ []) do
request(client, "ListTagsForDomain", input, options)
end
@doc """
This operation registers a domain. Domains are registered either by Amazon
Registrar (for .com, .net, and .org domains) or by our registrar associate,
Gandi (for all other domains). For some top-level domains (TLDs), this
operation requires extra parameters.
When you register a domain, Amazon Route 53 does the following:
<ul> <li> Creates a Route 53 hosted zone that has the same name as the
domain. Route 53 assigns four name servers to your hosted zone and
automatically updates your domain registration with the names of these name
servers.
</li> <li> Enables autorenew, so your domain registration will renew
automatically each year. We'll notify you in advance of the renewal date so
you can choose whether to renew the registration.
</li> <li> Optionally enables privacy protection, so WHOIS queries return
contact information either for Amazon Registrar (for .com, .net, and .org
domains) or for our registrar associate, Gandi (for all other TLDs). If you
don't enable privacy protection, WHOIS queries return the information that
you entered for the registrant, admin, and tech contacts.
</li> <li> If registration is successful, returns an operation ID that you
can use to track the progress and completion of the action. If the request
is not completed successfully, the domain registrant is notified by email.
</li> <li> Charges your AWS account an amount based on the top-level
domain. For more information, see [Amazon Route 53
Pricing](http://aws.amazon.com/route53/pricing/).
</li> </ul>
"""
def register_domain(client, input, options \\ []) do
request(client, "RegisterDomain", input, options)
end
@doc """
Rejects the transfer of a domain from another AWS account to the current
AWS account. You initiate a transfer between AWS accounts using
[TransferDomainToAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_TransferDomainToAnotherAwsAccount.html).
Use either
[ListOperations](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ListOperations.html)
or
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
to determine whether the operation succeeded.
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
provides additional information, for example, `Domain Transfer from Aws
Account 111122223333 has been cancelled`.
"""
def reject_domain_transfer_from_another_aws_account(client, input, options \\ []) do
request(client, "RejectDomainTransferFromAnotherAwsAccount", input, options)
end
@doc """
This operation renews a domain for the specified number of years. The cost
of renewing your domain is billed to your AWS account.
We recommend that you renew your domain several weeks before the expiration
date. Some TLD registries delete domains before the expiration date if you
haven't renewed far enough in advance. For more information about renewing
domain registration, see [Renewing Registration for a
Domain](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-renew.html)
in the *Amazon Route 53 Developer Guide*.
"""
def renew_domain(client, input, options \\ []) do
request(client, "RenewDomain", input, options)
end
@doc """
For operations that require confirmation that the email address for the
registrant contact is valid, such as registering a new domain, this
operation resends the confirmation email to the current email address for
the registrant contact.
"""
def resend_contact_reachability_email(client, input, options \\ []) do
request(client, "ResendContactReachabilityEmail", input, options)
end
@doc """
This operation returns the AuthCode for the domain. To transfer a domain to
another registrar, you provide this value to the new registrar.
"""
def retrieve_domain_auth_code(client, input, options \\ []) do
request(client, "RetrieveDomainAuthCode", input, options)
end
@doc """
Transfers a domain from another registrar to Amazon Route 53. When the
transfer is complete, the domain is registered either with Amazon Registrar
(for .com, .net, and .org domains) or with our registrar associate, Gandi
(for all other TLDs).
For more information about transferring domains, see the following topics:
<ul> <li> For transfer requirements, a detailed procedure, and information
about viewing the status of a domain that you're transferring to Route 53,
see [Transferring Registration for a Domain to Amazon Route
53](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-transfer-to-route-53.html)
in the *Amazon Route 53 Developer Guide*.
</li> <li> For information about how to transfer a domain from one AWS
account to another, see
[TransferDomainToAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_TransferDomainToAnotherAwsAccount.html).
</li> <li> For information about how to transfer a domain to another domain
registrar, see [Transferring a Domain from Amazon Route 53 to Another
Registrar](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-transfer-from-route-53.html)
in the *Amazon Route 53 Developer Guide*.
</li> </ul> If the registrar for your domain is also the DNS service
provider for the domain, we highly recommend that you transfer your DNS
service to Route 53 or to another DNS service provider before you transfer
your registration. Some registrars provide free DNS service when you
purchase a domain registration. When you transfer the registration, the
previous registrar will not renew your domain registration and could end
your DNS service at any time.
<important> If the registrar for your domain is also the DNS service
provider for the domain and you don't transfer DNS service to another
provider, your website, email, and the web applications associated with the
domain might become unavailable.
</important> If the transfer is successful, this method returns an
operation ID that you can use to track the progress and completion of the
action. If the transfer doesn't complete successfully, the domain
registrant will be notified by email.
"""
def transfer_domain(client, input, options \\ []) do
request(client, "TransferDomain", input, options)
end
@doc """
Transfers a domain from the current AWS account to another AWS account.
Note the following:
<ul> <li> The AWS account that you're transferring the domain to must
accept the transfer. If the other account doesn't accept the transfer
within 3 days, we cancel the transfer. See
[AcceptDomainTransferFromAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_AcceptDomainTransferFromAnotherAwsAccount.html).
</li> <li> You can cancel the transfer before the other account accepts it.
See
[CancelDomainTransferToAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_CancelDomainTransferToAnotherAwsAccount.html).
</li> <li> The other account can reject the transfer. See
[RejectDomainTransferFromAnotherAwsAccount](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_RejectDomainTransferFromAnotherAwsAccount.html).
</li> </ul> <important> When you transfer a domain from one AWS account to
another, Route 53 doesn't transfer the hosted zone that is associated with
the domain. DNS resolution isn't affected if the domain and the hosted zone
are owned by separate accounts, so transferring the hosted zone is
optional. For information about transferring the hosted zone to another AWS
account, see [Migrating a Hosted Zone to a Different AWS
Account](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/hosted-zones-migrating.html)
in the *Amazon Route 53 Developer Guide*.
</important> Use either
[ListOperations](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ListOperations.html)
or
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
to determine whether the operation succeeded.
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
provides additional information, for example, `Domain Transfer from Aws
Account 111122223333 has been cancelled`.
"""
def transfer_domain_to_another_aws_account(client, input, options \\ []) do
request(client, "TransferDomainToAnotherAwsAccount", input, options)
end
@doc """
This operation updates the contact information for a particular domain. You
must specify information for at least one contact: registrant,
administrator, or technical.
If the update is successful, this method returns an operation ID that you
can use to track the progress and completion of the action. If the request
is not completed successfully, the domain registrant will be notified by
email.
"""
def update_domain_contact(client, input, options \\ []) do
request(client, "UpdateDomainContact", input, options)
end
@doc """
This operation updates the specified domain contact's privacy setting. When
privacy protection is enabled, contact information such as email address is
replaced either with contact information for Amazon Registrar (for .com,
.net, and .org domains) or with contact information for our registrar
associate, Gandi.
This operation affects only the contact information for the specified
contact type (registrant, administrator, or tech). If the request succeeds,
Amazon Route 53 returns an operation ID that you can use with
[GetOperationDetail](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_GetOperationDetail.html)
to track the progress and completion of the action. If the request doesn't
complete successfully, the domain registrant will be notified by email.
<important> By disabling the privacy service via API, you consent to the
publication of the contact information provided for this domain via the
public WHOIS database. You certify that you are the registrant of this
domain name and have the authority to make this decision. You may withdraw
your consent at any time by enabling privacy protection using either
`UpdateDomainContactPrivacy` or the Route 53 console. Enabling privacy
protection removes the contact information provided for this domain from
the WHOIS database. For more information on our privacy practices, see
[https://aws.amazon.com/privacy/](https://aws.amazon.com/privacy/).
</important>
"""
def update_domain_contact_privacy(client, input, options \\ []) do
request(client, "UpdateDomainContactPrivacy", input, options)
end
@doc """
This operation replaces the current set of name servers for the domain with
the specified set of name servers. If you use Amazon Route 53 as your DNS
service, specify the four name servers in the delegation set for the hosted
zone for the domain.
If successful, this operation returns an operation ID that you can use to
track the progress and completion of the action. If the request is not
completed successfully, the domain registrant will be notified by email.
"""
def update_domain_nameservers(client, input, options \\ []) do
request(client, "UpdateDomainNameservers", input, options)
end
@doc """
This operation adds or updates tags for a specified domain.
All tag operations are eventually consistent; subsequent operations might
not immediately represent all issued operations.
"""
def update_tags_for_domain(client, input, options \\ []) do
request(client, "UpdateTagsForDomain", input, options)
end
@doc """
Returns all the domain-related billing records for the current AWS account
for a specified period
"""
def view_billing(client, input, options \\ []) do
request(client, "ViewBilling", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
# Builds and signs (SigV4) an AWS JSON 1.1 request for the given `action`,
# then POSTs it. All public functions above funnel through here.
defp request(client, action, input, options) do
client = %{client | service: "route53domains"}
host = build_host("route53domains", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "Route53Domains_v20140515.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
# Executes the signed POST. Only HTTP 200 is treated as success; an empty
# 200 body yields `{:ok, nil, response}` (hence `map() | nil` in the spec).
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
# Host resolution: a "local" region uses the configured endpoint (or
# "localhost"); otherwise the standard `prefix.region.endpoint` form.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the request URL from the client's protocol and port.
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
# JSON (de)serialization delegated to the configured AWS.Client codec.
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/route53_domains.ex
| 0.895933
| 0.463748
|
route53_domains.ex
|
starcoder
|
defmodule Momento.Add do
import Momento.Guards
import Momento.Helpers
@moduledoc """
This module holds all the various `add/3` methods.
"""
@doc """
Add a specified amount of time. Available atoms are `:years`, `:months`, `:days`, `:hours`, `:minutes`, `:seconds`,
`:milliseconds` and `:microseconds`.
## Examples
iex> Momento.date! |> Momento.add(5, :years)
%DateTime{calendar: Calendar.ISO, day: 1, hour: 22, microsecond: {703979, 6},
minute: 34, month: 7, second: 50, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2021, zone_abbr: "UTC"}
...> Momento.date! |> Momento.add(5, :days)
%DateTime{calendar: Calendar.ISO, day: 6, hour: 22, microsecond: {904112, 6},
minute: 37, month: 7, second: 56, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}
...> Momento.date! |> Momento.add(5, :hours) |> Momento.add(15, :minutes)
%DateTime{calendar: Calendar.ISO, day: 2, hour: 3, microsecond: {546990, 6},
minute: 59, month: 7, second: 26, std_offset: 0, time_zone: "Etc/UTC",
utc_offset: 0, year: 2016, zone_abbr: "UTC"}
"""
@spec add(DateTime.t, integer, atom) :: DateTime.t
# Singular to plural
# Normalize singular unit atoms to their plural equivalents so the
# arithmetic clauses below only need to handle the plural forms.
def add(datetime, num, :year), do: add(datetime, num, :years)
def add(datetime, num, :month), do: add(datetime, num, :months)
def add(datetime, num, :day), do: add(datetime, num, :days)
def add(datetime, num, :hour), do: add(datetime, num, :hours)
def add(datetime, num, :minute), do: add(datetime, num, :minutes)
def add(datetime, num, :second), do: add(datetime, num, :seconds)
def add(datetime, num, :millisecond), do: add(datetime, num, :milliseconds)
def add(datetime, num, :microsecond), do: add(datetime, num, :microseconds)
# Years
# Base case: simply bumps the year field (guard `natural?` comes from
# Momento.Guards; presumably num >= 0 — confirm there).
# NOTE(review): keeps day/month untouched, so Feb 29 plus a year would
# produce an invalid calendar date — verify whether callers guard this.
def add(%DateTime{year: year} = datetime, num, :years)
when natural?(num),
do: %DateTime{datetime | year: year + num}
# Months

# Base case: the target month stays within the same year.
def add(%DateTime{month: month} = datetime, num, :months)
    when natural?(num) and month + num <= 12,
    do: %DateTime{datetime | month: month + num}

# Many years worth of months: peel off whole years first, then recurse
# with the remaining (< 12) months.
def add(%DateTime{} = datetime, num, :months)
    when positive?(num) and num > 11 do
  years = floor(num / 12)
  add(datetime, years, :years) |> add(num - years * 12, :months)
end

# Rollover months into the next year.
#
# BUG FIX: the remainder was previously `num - month - 1`, which is wrong
# and can even go negative (e.g. month: 10, num: 5 gave -6, matching no
# clause). Stepping from `month` to January of the next year consumes
# `(12 - month) + 1` months, so the remainder is `num - (12 - month) - 1`,
# yielding a final month of `month + num - 12` as expected.
def add(%DateTime{month: month} = datetime, num, :months)
    when positive?(num) and month + num > 12,
    do: add(%DateTime{datetime | month: 1}, 1, :years) |> add(num - (12 - month) - 1, :months)
# Days
# Base case: the target day stays within the current month
# (`days_in_month/1` is a guard-safe helper from Momento.Guards;
# presumably it ignores leap years — confirm there).
def add(%DateTime{month: month, day: day} = datetime, num, :days)
when natural?(num) and day + num <= days_in_month(month),
do: %DateTime{datetime | day: day + num}
# Many months worth of days: consume one whole month, then recurse.
# NOTE(review): adding a month keeps `day` unchanged, so a transient
# invalid date (e.g. Feb 31) can exist mid-recursion before the rollover
# clause below normalizes it — verify this is acceptable upstream.
def add(%DateTime{month: month} = datetime, num, :days)
when positive?(num) and num > days_in_month(month),
do: add(datetime, 1, :months) |> add(num - days_in_month(month), :days)
# Rollover days into the next month: reset to day 1, advance one month,
# then add the remainder; the `- 1` accounts for days being one-based.
def add(%DateTime{month: month, day: day} = datetime, num, :days)
when positive?(num) and day + num > days_in_month(month),
do: add(%DateTime{datetime | day: 1}, 1, :months) |> add(num - (days_in_month(month) - day) - 1, :days)
# Hours
# Base case: the sum stays within the current day.
def add(%DateTime{hour: hour} = datetime, amount, :hours)
    when natural?(amount) and amount + hour < 24 do
  %{datetime | hour: amount + hour}
end

# More than a day's worth of hours: peel off the whole days first.
def add(%DateTime{} = datetime, amount, :hours)
    when positive?(amount) and amount > 24 do
  days = floor(amount / 24)

  datetime
  |> add(days, :days)
  |> add(amount - days * 24, :hours)
end

# Rollover into the next day: reaching midnight consumes (24 - hour) hours.
def add(%DateTime{hour: hour} = datetime, amount, :hours)
    when positive?(amount) and amount + hour > 23 do
  %{datetime | hour: 0}
  |> add(1, :days)
  |> add(amount - (24 - hour), :hours)
end
# Minutes
# Base case: the sum stays within the current hour.
def add(%DateTime{minute: minute} = datetime, num, :minutes)
    when natural?(num) and num + minute < 60,
    do: %DateTime{datetime | minute: num + minute}

# More than an hour's worth of minutes: peel off the whole hours first.
# `div/2` avoids float division, which loses precision for huge integers.
def add(%DateTime{} = datetime, num, :minutes)
    when positive?(num) and num > 60 do
  hours = div(num, 60)
  add(datetime, hours, :hours) |> add(num - hours * 60, :minutes)
end

# Rollover into the next hour. Reaching minute 0 of the next hour consumes
# exactly (60 - minute) minutes. (The original subtracted one minute too
# many — `num - (60 - minute) - 1` — so e.g. adding 1 minute at :59 recursed
# with -1 and crashed with a FunctionClauseError, and larger amounts landed
# one minute short.)
def add(%DateTime{minute: minute} = datetime, num, :minutes)
    when positive?(num) and num + minute >= 60,
    do: add(%DateTime{datetime | minute: 0}, 1, :hours) |> add(num - (60 - minute), :minutes)
# Seconds
# Base case: the sum stays within the current minute.
def add(%DateTime{second: second} = datetime, num, :seconds)
    when natural?(num) and num + second < 60,
    do: %DateTime{datetime | second: num + second}

# More than a minute's worth of seconds: peel off the whole minutes first.
# `div/2` avoids float division, which loses precision for huge integers.
def add(%DateTime{} = datetime, num, :seconds)
    when positive?(num) and num > 60 do
  minutes = div(num, 60)
  add(datetime, minutes, :minutes) |> add(num - minutes * 60, :seconds)
end

# Rollover into the next minute. Reaching second 0 of the next minute
# consumes exactly (60 - second) seconds. (The original subtracted one
# second too many — `num - (60 - second) - 1` — so adding 1 second at :59
# recursed with -1 and crashed with a FunctionClauseError, and larger
# amounts landed one second short.)
def add(%DateTime{second: second} = datetime, num, :seconds)
    when positive?(num) and num + second >= 60,
    do: add(%DateTime{datetime | second: 0}, 1, :minutes) |> add(num - (60 - second), :seconds)
# Milliseconds
# TODO: This doesn't seem right and is incomplete
# NOTE(review): the base case can push the microsecond field past 999_999
# (there is no rollover into the next second for amounts below 1_000 ms),
# and no clause exists at all for precisions < 3 — confirm the intended
# behavior before relying on this unit.
# Base case
def add(%DateTime{microsecond: {microsecond, precision}} = datetime, num, :milliseconds)
when natural?(num) and num <= 999,
do: %DateTime{datetime | microsecond: {microsecond + num * millisecond_factor(precision), precision}}
# Many seconds worth of milliseconds
# NOTE(review): divides by millisecond_factor(precision) rather than a fixed
# 1_000; this is only correct if the factor equals the number of milliseconds
# per second at this precision — verify against millisecond_factor/1, which
# is defined elsewhere in this module.
def add(%DateTime{microsecond: {_, precision}} = datetime, num, :milliseconds)
when positive?(num) and num > 999 and precision >= 3
do
  seconds = Float.floor(num / millisecond_factor(precision)) |> round
  add(datetime, seconds, :seconds) |> add(num - seconds * millisecond_factor(precision), :milliseconds)
end
# Microseconds
# Base case: the sum fits inside the microsecond field. Only full 6-digit
# precision values are supported, mirroring the original clauses.
def add(%DateTime{microsecond: {microsecond, precision}} = datetime, num, :microseconds)
    when natural?(num) and precision === 6 and microsecond + num <= 999999,
    do: %DateTime{datetime | microsecond: {microsecond + num, precision}}

# More than a second's worth of microseconds: peel off whole seconds first.
def add(%DateTime{microsecond: {_, precision}} = datetime, num, :microseconds)
    when positive?(num) and precision === 6 and num > 999999 do
  seconds = Float.floor(num / microsecond_factor(precision)) |> round
  add(datetime, seconds, :seconds) |> add(num - seconds * microsecond_factor(precision), :microseconds)
end

# Rollover into the next second. This clause was missing in the original:
# amounts with microsecond + num > 999_999 but num <= 999_999 matched no
# clause and raised FunctionClauseError. Reaching 0 of the next second
# consumes (1_000_000 - microsecond) microseconds.
def add(%DateTime{microsecond: {microsecond, precision}} = datetime, num, :microseconds)
    when positive?(num) and precision === 6 and microsecond + num > 999999,
    do: add(%DateTime{datetime | microsecond: {0, precision}}, 1, :seconds) |> add(num - (1_000_000 - microsecond), :microseconds)
end
|
lib/add.ex
| 0.734786
| 0.656204
|
add.ex
|
starcoder
|
defprotocol Timex.Comparable do
  @moduledoc """
  This protocol is used for comparing and diffing different date/time representations
  """
  alias Timex.Types

  # Units accepted by `compare/3` and `diff/3`; both singular and plural
  # spellings are valid.
  @type granularity ::
          :year
          | :years
          | :month
          | :months
          | :week
          | :weeks
          | :calendar_week
          | :calendar_weeks
          | :day
          | :days
          | :hour
          | :hours
          | :minute
          | :minutes
          | :second
          | :seconds
          | :millisecond
          | :milliseconds
          | :microsecond
          | :microseconds
          | :duration
  # Well-known reference points a comparable may be compared against.
  @type constants :: :epoch | :zero | :distant_past | :distant_future
  @type comparable ::
          Date.t() | DateTime.t() | NaiveDateTime.t() | Types.date() | Types.datetime()
  @type compare_result :: -1 | 0 | 1 | {:error, term}
  @type diff_result :: Timex.Duration.t() | integer | {:error, term}

  @doc """
  Compare two date or datetime types.
  You can optionally specify a comparison granularity, any of the following:
  - :year
  - :years
  - :month
  - :months
  - :week
  - :weeks
  - :calendar_week (weeks of the calendar as opposed to actual weeks in terms of days)
  - :calendar_weeks
  - :day
  - :days
  - :hour
  - :hours
  - :minute
  - :minutes
  - :second
  - :seconds
  - :millisecond
  - :milliseconds
  - :microsecond (default)
  - :microseconds
  - :duration
  and the dates will be compared with the corresponding accuracy.
  The default granularity is `:microsecond`.

  Returns one of the following:
  - 0: when equal
  - -1: when the first date/time comes before the second
  - 1: when the first date/time comes after the second
  - {:error, reason}: when there was a problem comparing,
    perhaps due to a value being passed which is not a valid date/datetime
  ## Examples
      iex> use Timex
      iex> date1 = ~D[2014-03-04]
      iex> date2 = ~D[2015-03-04]
      iex> Timex.compare(date1, date2, :year)
      -1
      iex> Timex.compare(date2, date1, :year)
      1
      iex> Timex.compare(date1, date1)
      0
  """
  @spec compare(comparable, comparable, granularity) :: compare_result
  def compare(a, b, granularity \\ :microsecond)

  @doc """
  Get the difference between two date or datetime types.
  You can optionally specify a diff granularity, any of the following:
  - :year
  - :years
  - :month
  - :months
  - :week
  - :weeks
  - :calendar_week (weeks of the calendar as opposed to actual weeks in terms of days)
  - :calendar_weeks
  - :day
  - :days
  - :hour
  - :hours
  - :minute
  - :minutes
  - :second
  - :seconds
  - :millisecond
  - :milliseconds
  - :microsecond (default)
  - :microseconds
  - :duration
  and the result will be an integer value of those units or a Duration struct.
  The diff value will be negative if `a` comes before `b`, and positive if `a` comes
  after `b`. This behaviour mirrors `compare/3`.
  When using granularity of :months, the number of days in the month varies. This
  behavior mirrors `Timex.shift/2`.
  ## Examples
      iex> use Timex
      iex> date1 = ~D[2015-01-28]
      iex> date2 = ~D[2015-02-28]
      iex> Timex.diff(date1, date2, :month)
      -1
      iex> Timex.diff(date2, date1, :month)
      1
      iex> use Timex
      iex> date1 = ~D[2015-01-31]
      iex> date2 = ~D[2015-02-28]
      iex> Timex.diff(date1, date2, :month)
      -1
      iex> Timex.diff(date2, date1, :month)
      0
  """
  @spec diff(comparable, comparable, granularity) :: diff_result
  def diff(a, b, granularity \\ :microsecond)
end
|
lib/comparable/comparable.ex
| 0.93619
| 0.701726
|
comparable.ex
|
starcoder
|
defmodule Mutation.Mutations do
  @moduledoc """
  Catalog of AST mutations for mutation testing.

  Each mutation is a map with a `:tag` (the operator/construct it targets)
  and a list of `{transform_fn, reason}` tuples, where `transform_fn`
  rewrites a matching AST node and `reason` explains which missing test the
  surviving mutant reveals.
  """

  # Traverses all the mutations, invoking `code_transverse_function` with
  # `(tag, mutation_function, mutation_reason)` for each mutation and
  # flattening the results it returns into a single list.
  def each_mutation(code_transverse_function) do
    Enum.flat_map all_mutations, fn(mutation_map) ->
      Enum.reduce mutation_map.functions, [], fn({mutation_function, mutation_reason}, acc) ->
        result = code_transverse_function.(mutation_map.tag, mutation_function, mutation_reason)
        # For debug
        # IO.inspect result
        # IO.inspect mutation_reason
        Enum.concat(acc, result)
      end
    end
  end

  # The full mutation catalog: numeric comparisons, (in)equality, boolean
  # operators, and `if` conditions.
  defp all_mutations do
    [
      %{
        tag: :>,
        functions: numeric_comparison_functions(:>)
      },
      %{
        tag: :>=,
        functions: numeric_comparison_functions(:>=)
      },
      %{
        tag: :<,
        functions: numeric_comparison_functions(:<)
      },
      %{
        tag: :<=,
        functions: numeric_comparison_functions(:<=)
      },
      %{
        tag: :==,
        functions: [
          {
            fn {:==, meta, [left, right]} -> {:!=, meta, [left, right]} end,
            "Write a test to check the result when this equality happens."
          }
        ]
      },
      %{
        tag: :!=,
        functions: [
          {
            fn {:!=, meta, [left, right]} -> {:==, meta, [left, right]} end,
            "Write a test to check the result when this inequality happens."
          }
        ]
      },
      %{
        tag: :and,
        functions: [
          {
            fn {:and, meta, [_cond1, cond2]} -> {:and, meta, [true, cond2]} end,
            "Write a test to check the result when the left side condition evaluates to false."
          },{
            fn {:and, meta, [_cond1, cond2]} -> {:and, meta, [false, cond2]} end,
            "Write a test to check the result when the left side condition evaluates to true."
          },{
            fn {:and, meta, [cond1, _cond2]} -> {:and, meta, [cond1, true]} end,
            "Write a test to check the result when the right side condition evaluates to false."
          },{
            fn {:and, meta, [cond1, _cond2]} -> {:and, meta, [cond1, false]} end,
            "Write a test to check the result when the right side condition evaluates to true."
          },{
            fn {:and, meta, [cond1, cond2]} -> {:or, meta, [cond1, cond2]} end,
            "Write a test to check the result when the left side and the right side conditions evaluate to true."
          }
        ]
      },
      %{
        tag: :or,
        functions: [
          {
            fn {:or, meta, [_cond1, cond2]} -> {:or, meta, [true, cond2]} end,
            "Write a test to check the result when the left side condition evaluates to false."
          },{
            fn {:or, meta, [_cond1, cond2]} -> {:or, meta, [false, cond2]} end,
            "Write a test to check the result when the left side condition evaluates to true."
          },{
            fn {:or, meta, [cond1, _cond2]} -> {:or, meta, [cond1, true]} end,
            "Write a test to check the result when the right side condition evaluates to false."
          },{
            fn {:or, meta, [cond1, _cond2]} -> {:or, meta, [cond1, false]} end,
            "Write a test to check the result when the right side condition evaluates to true."
          },{
            fn {:or, meta, [cond1, cond2]} -> {:and, meta, [cond1, cond2]} end,
            "Write a test to check the result when the left side and the right side conditions evaluate to true."
          }
        ]
      },
      %{
        tag: :if,
        functions: [
          {
            fn {:if, meta, [_cond_arg, code_arg]} -> {:if, meta, [true, code_arg]} end,
            "Write a test to check the result of making the condition of this `if` to evaluate to false."
          },{
            fn {:if, meta, [_cond_arg, code_arg]} -> {:if, meta, [false, code_arg]} end,
            "Write a test to check the result of making the condition of this `if` to evaluate to true"
          },{
            # fn {:if, meta, [_cond_arg, code_arg]} -> {:if, meta, [nil, code_arg]} end,
            fn {:if, meta, [cond_arg, code_arg]} -> {:if, meta, [negate_conditional_node(meta, cond_arg), code_arg]} end,
            "Write a test to check the result of making the condition of this `if` to evaluate to true"
          }
        ]
      }
    ]
  end

  # Builds the replacement set for one comparison operator: every other
  # comparison operator except `element_name` itself (deleted below), so a
  # `>` node can be mutated into `<`, `<=`, `>=`, `==` and `!=`.
  defp numeric_comparison_functions(element_name) do
    all_comparisons = %{
      <: {
        fn {^element_name, meta, [left, right]} -> {:<, meta, [left, right]} end,
        "Write a test to check the result when the left side is the same than the right"
      },
      <=: {
        fn {^element_name, meta, [left, right]} -> {:<=, meta, [left, right]} end,
        "Write a test to check the result when the left side is bigger than the right"
      },
      >: {
        fn {^element_name, meta, [left, right]} -> {:>, meta, [left, right]} end,
        "Write a test to check the result when the right side is the same than the left"
      },
      >=: {
        fn {^element_name, meta, [left, right]} -> {:>=, meta, [left, right]} end,
        "Write a test to check the result when the right side is bigger than the left"
      },
      ==: {
        fn {^element_name, meta, [left, right]} -> {:==, meta, [left, right]} end,
        "Write a test to check the result when the left side is a different value from the right"
      },
      !=: {
        fn {^element_name, meta, [left, right]} -> {:!=, meta, [left, right]} end,
        "Write a test to check the result when the left side is the same value than the right"
      }
    }
    all_comparisons
    |> Map.delete(element_name)
    |> Map.values
  end

  # Wraps a condition AST node in a `!` so the branch outcome is inverted.
  defp negate_conditional_node(meta, conditional_arg) do
    {:!, meta, [conditional_arg]}
  end
end
|
lib/mutation/mutations.ex
| 0.542136
| 0.663557
|
mutations.ex
|
starcoder
|
defmodule Membrane.Element.Pcap.Source do
  @moduledoc """
  Element that reads subsequent packets from `pcap` file and sends them
  as buffers through the output pad.
  """
  use Membrane.Source
  use Bunch
  alias Membrane.Buffer
  alias Membrane.Element.Pcap.Parser
  alias ExPcap.Packet

  # Capture held in an attribute so the packet-fetching call site below stays
  # a single indirection point (presumably to ease swapping/stubbing — TODO
  # confirm intent).
  @next_packet &Parser.next_packet/1

  def_output_pad :output,
    caps: :any

  def_options packet_transformer: [
                type: :function,
                spec: (Packet.t() -> Buffer.t() | nil),
                default: &__MODULE__.default_transformer/1,
                description: """
                This function transforms parsed packet into a buffer.
                It is applied on each packet.
                It allows enriching buffers with metadata extracted from
                packets f.e. packet source address or port.
                """
              ],
              path: [
                type: :string,
                description: "Path to the .pcap file"
              ]

  defmodule State do
    @moduledoc false
    @enforce_keys [:transformer, :path]
    defstruct @enforce_keys ++ [:parser]

    # `:parser` is nil until the element transitions to playing.
    @type t :: %__MODULE__{
            transformer: (Packet.t() -> Buffer.t()),
            path: binary()
          }
  end

  # Stores the configured path and transformer; the file itself is opened
  # later, in handle_prepared_to_playing/2.
  @impl true
  def handle_init(%__MODULE__{path: path, packet_transformer: transformer}) do
    {:ok,
     %State{
       path: path,
       transformer: transformer
     }}
  end

  # Opens the pcap file and keeps the parser handle in state; propagates the
  # parser's error tuple unchanged on failure.
  @impl true
  def handle_prepared_to_playing(_context, %State{path: path} = state) do
    case Parser.from_file(path) do
      {:ok, parser} -> {:ok, %State{state | parser: parser}}
      {:error, _} = error -> {error, state}
    end
  end

  # Releases the parser when playback stops.
  @impl true
  def handle_prepared_to_stopped(_context, %State{parser: parser} = state) do
    Parser.destroy(parser)
    {:ok, %State{state | parser: nil}}
  end

  # Satisfies downstream demand by reading up to `size` packets and emitting
  # them as buffer actions (plus end_of_stream when the file is exhausted).
  @impl true
  def handle_demand(:output, size, :buffers, _ctx, state) do
    %State{parser: parser, transformer: transformer} = state
    case fetch_packets(size, parser, transformer) do
      {:error, _} = error ->
        {error, state}
      result ->
        {{:ok, pack_fetched_packets(result)}, state}
    end
  end

  @spec default_transformer(Packet.t()) :: Buffer.t()
  def default_transformer(%ExPcap.Packet{parsed_packet_data: {_, payload}}),
    do: %Buffer{payload: payload}

  # Reads up to `count` packets, applying `transformer` to each; packets the
  # transformer maps to nil are skipped without consuming demand.
  # Note: Will return buffers in reversed order
  defp fetch_packets(count, parser, transformer, acc \\ [])
  defp fetch_packets(0, _, _, acc), do: acc
  defp fetch_packets(count, parser, transformer, acc) do
    case @next_packet.(parser) do
      {:error, _, _} ->
        {:error, :unparsable_data}
      {:ok, :eof} ->
        {:eof, acc}
      {:ok, %Packet{} = packet} ->
        case transformer.(packet) do
          nil ->
            fetch_packets(count, parser, transformer, acc)
          buffer ->
            fetch_packets(count - 1, parser, transformer, [buffer | acc])
        end
    end
  end

  # Converts the fetch result into Membrane actions, restoring chronological
  # order (fetch_packets accumulates in reverse) and appending end_of_stream
  # when EOF was reached.
  defp pack_fetched_packets(result)
  defp pack_fetched_packets({:eof, []}), do: [end_of_stream: :output]
  defp pack_fetched_packets({:eof, buffers}),
    do: pack_fetched_packets(buffers) ++ pack_fetched_packets({:eof, []})
  defp pack_fetched_packets(buffers) when is_list(buffers),
    do: [buffer: {:output, Enum.reverse(buffers)}]
end
|
lib/pcap/source.ex
| 0.90939
| 0.401453
|
source.ex
|
starcoder
|
defmodule VintageNet.IP.DnsdConfig do
  @moduledoc """
  This is a helper module for VintageNet.Technology implementations that use
  the Busybox DNS server.
  DNS functionality is only supported for IPv4 configurations using static IP
  addresses.
  DNS server parameters are:
  * `:port` - The port to use (defaults to 53)
  * `:ttl` - DNS record TTL in seconds (defaults to 120)
  * `:records` - DNS A records (required)
  The `:records` option is a list of name/IP address tuples. For example:
  ```
  [{"example.com", {1, 2, 3, 4}}]
  ```
  Only IPv4 addresses are supported. Addresses may be specified as strings or
  tuples, but will be normalized to tuple form before being applied.
  """
  alias VintageNet.{Command, IP}
  alias VintageNet.Interface.RawConfig

  @doc """
  Normalize the DNSD parameters in a configuration.

  For static IPv4 configurations the `:records` addresses are converted to
  tuple form and unknown keys are dropped; for any other configuration the
  `:dnsd` key is removed entirely (the server would never be started).
  """
  @spec normalize(map()) :: map()
  def normalize(%{ipv4: %{method: :static}, dnsd: dnsd} = config) do
    # Normalize IP addresses
    new_dnsd =
      dnsd
      |> Map.update(:records, [], &normalize_records/1)
      |> Map.take([
        :records,
        :port,
        :ttl
      ])
    %{config | dnsd: new_dnsd}
  end

  def normalize(%{dnsd: _something_else} = config) do
    # DNSD won't be started if not an IPv4 static configuration
    Map.drop(config, [:dnsd])
  end

  def normalize(config), do: config

  defp normalize_records(records) do
    Enum.map(records, &normalize_record/1)
  end

  # Accepts string or tuple addresses; raises on anything unparseable.
  defp normalize_record({name, ipa}) do
    {name, IP.ip_to_tuple!(ipa)}
  end

  @doc """
  Add dnsd configuration commands for running a DNSD server
  """
  @spec add_config(RawConfig.t(), map(), keyword()) :: RawConfig.t()
  def add_config(
        %RawConfig{
          ifname: ifname,
          files: files,
          child_specs: child_specs
        } = raw_config,
        %{ipv4: %{method: :static, address: address}, dnsd: dnsd_config},
        opts
      ) do
    tmpdir = Keyword.fetch!(opts, :tmpdir)
    dnsd = Keyword.fetch!(opts, :bin_dnsd)
    # One config file per interface so multiple dnsd instances don't clash.
    dnsd_conf_path = Path.join(tmpdir, "dnsd.conf.#{ifname}")
    new_files = [{dnsd_conf_path, dnsd_contents(dnsd_config)} | files]

    dnsd_args =
      [
        "-c",
        dnsd_conf_path,
        "-i",
        IP.ip_to_string(address)
      ]
      |> add_port(dnsd_config)
      |> add_ttl(dnsd_config)

    new_child_specs =
      child_specs ++
        [
          Supervisor.child_spec(
            {MuonTrap.Daemon,
             [
               dnsd,
               dnsd_args,
               Command.add_muon_options(stderr_to_stdout: true, log_output: :debug)
             ]},
            id: :dnsd
          )
        ]

    %RawConfig{raw_config | files: new_files, child_specs: new_child_specs}
  end

  # No static IPv4 + dnsd settings: leave the raw config untouched.
  # (Renamed from `_config_without_dhcpd` — a copy-paste leftover from the
  # DHCPD helper that misdescribed this DNSD clause.)
  def add_config(raw_config, _config_without_dnsd, _opts), do: raw_config

  # Renders the records into Busybox dnsd's "name address" config format.
  defp dnsd_contents(%{records: records}) do
    Enum.map(records, &record_to_string/1)
    |> IO.iodata_to_binary()
  end

  defp record_to_string({name, ipa}) do
    "#{name} #{IP.ip_to_string(ipa)}\n"
  end

  # Prepends "-p PORT" when a port was configured; no-op otherwise.
  defp add_port(dnsd_args, %{port: port}) do
    ["-p", to_string(port) | dnsd_args]
  end

  defp add_port(dnsd_args, _dnsd_config), do: dnsd_args

  # Prepends "-t TTL" when a TTL was configured; no-op otherwise.
  defp add_ttl(dnsd_args, %{ttl: ttl}) do
    ["-t", to_string(ttl) | dnsd_args]
  end

  defp add_ttl(dnsd_args, _dnsd_config), do: dnsd_args
end
|
lib/vintage_net/ip/dnsd_config.ex
| 0.863348
| 0.820182
|
dnsd_config.ex
|
starcoder
|
defmodule Mix.Tasks.Docs do
  use Mix.Task

  @shortdoc "Generate documentation for the project"

  @moduledoc """
  Uses ExDoc to generate a static web page from the project documentation.
  ## Command line options
  * `--canonical`, `-n` - Indicate the preferred URL with
    rel="canonical" link element, defaults to no canonical path
  * `--formatter`, `-f` - Which formatters to use, "html" or
    "epub", default: "html" (may be given more than once)
  * `--output`, `-o` - Output directory for the generated
    docs, default: `"doc"`
  The command line options have higher precedence than the options
  specified in your `mix.exs` file below.
  ## Configuration
  ExDoc will automatically pull in information from your project,
  like the application and version. However, you may want to set
  `:name`, `:source_url` and `:homepage_url` to have a nicer output
  from ExDoc, for example:
      def project do
        [app: :my_app,
         version: "0.1.0-dev",
         deps: deps(),
         # Docs
         name: "<NAME>",
         source_url: "https://github.com/USER/PROJECT",
         homepage_url: "http://YOUR_PROJECT_HOMEPAGE",
         docs: [main: "MyApp", # The main page in the docs
                logo: "path/to/logo.png",
                extras: ["README.md"]]]
      end
  ExDoc also allows configuration specific to the documentation to
  be set. The following options should be put under the `:docs` key
  in your project's main configuration. The `:docs` options should
  be a keyword list or a function returning a keyword list that will
  be lazily executed.
  * `:assets` - Path to a directory that will be copied as is to the "assets"
    directory in the output path. Its entries may be referenced in your docs
    under "assets/ASSET.EXTENSION"; defaults to no assets directory.
  * `:canonical` - String that defines the preferred URL with the rel="canonical"
    element; defaults to no canonical path.
  * `:deps` - A keyword list application names and their documentation URL.
    ExDoc will by default include all dependencies and assume they are hosted on
    HexDocs. This can be overridden by your own values. Example: `[plug: "https://myserver/plug/"]`
  * `:extra_section` - String that defines the section title of the additional
    Markdown pages; default: "PAGES". Example: "GUIDES"
  * `:extras` - List of keywords, each key must indicate the path to additional
    Markdown pages, the value for each keyword (optional) gives you more control
    about the PATH and the title of the output files; default: `[]`. Example:
    `["README.md", "CONTRIBUTING.md": [filename: "contributing", title: "Contributing", group: "Join us!"]]`
  * `:filter_prefix` - Include only modules that match the given prefix in
    the generated documentation. Example: "MyApp.Core"
  * `:formatters` - Formatter to use; default: ["html"],
    options: "html", "epub".
  * `:logo` - Path to the image logo of the project (only PNG or JPEG accepted)
    The image size will be 64x64. When specified, the logo will be placed under
    the "assets" directory in the output path under the name "logo" and the
    appropriate extension.
  * `:main` - Main page of the documentation. It may be a module or a
    generated page, like "Plug" or "api-reference"; default: "api-reference".
  * `:source_beam` - Path to the beam directory; default: mix's compile path.
  * `:source_ref` - The branch/commit/tag used for source link inference;
    default: "master".
  * `:source_root` - Path to the source code root directory;
    default: "." (current directory).
  * `:source_url_pattern` - Public URL of the project. Derived from
    project's `:source_url` and `:source_ref`. Example:
    "https://github.com/USER/APP/blob/master/%{path}#L%{line}"
  * `:output` - Output directory for the generated docs; default: "doc".
    May be overridden by command line argument.
  ## Umbrella project
  ExDoc can be used in an umbrella project and generates a single documentation for all child apps.
  Generating documentation per each child app can be achieved by running:
      mix cmd mix docs
  See `mix help cmd` for more information.
  """

  # Entry point: compiles the project, merges mix.exs docs options with CLI
  # flags, and runs `generator` once per requested formatter, returning the
  # list of generated index paths.
  @doc false
  def run(args, config \\ Mix.Project.config, generator \\ &ExDoc.generate_docs/3) do
    Mix.Task.run "compile"
    {cli_opts, args, _} = OptionParser.parse(args,
                            aliases: [n: :canonical, f: :formatter, o: :output],
                            switches: [canonical: :string, formatter: :keep, output: :string])
    if args != [] do
      Mix.raise "Extraneous arguments on the command line"
    end
    project = to_string(config[:name] || config[:app])
    version = config[:version] || "dev"
    options =
      config
      |> get_docs_opts()
      |> Keyword.merge(cli_opts)
      |> normalize_source_url(config)
      |> normalize_source_beam(config)
      |> normalize_main()
      |> normalize_deps()
    for formatter <- get_formatters(options) do
      index = generator.(project, version, Keyword.put(options, :formatter, formatter))
      log(index)
      index
    end
  end

  # CLI `--formatter` flags win over the `:formatters` config entry; falls
  # back to ExDoc's default formatter when neither is given.
  defp get_formatters(options) do
    case Keyword.get_values(options, :formatter) do
      [] -> options[:formatters] || [ExDoc.Config.default(:formatter)]
      values -> values
    end
  end

  # `:docs` may be a keyword list, a zero-arity function (evaluated lazily
  # here), or absent.
  defp get_docs_opts(config) do
    docs = config[:docs]
    cond do
      is_function(docs, 0) -> docs.()
      is_nil(docs) -> []
      true -> docs
    end
  end

  defp log(index) do
    Mix.shell.info [:green, "Docs successfully generated."]
    Mix.shell.info [:green, "View them at #{inspect index}."]
  end

  # Copies the project's :source_url into the doc options when present.
  defp normalize_source_url(options, config) do
    if source_url = config[:source_url] do
      Keyword.put(options, :source_url, source_url)
    else
      options
    end
  end

  # Defaults :source_beam to the compile path (all child apps' ebin dirs for
  # umbrella projects) unless the user set it explicitly.
  defp normalize_source_beam(options, config) do
    compile_path =
      if Mix.Project.umbrella?(config) do
        umbrella_compile_paths()
      else
        Mix.Project.compile_path
      end
    Keyword.put_new(options, :source_beam, compile_path)
  end

  defp umbrella_compile_paths do
    # TODO: Use Mix.Project.apps_path when we require Elixir v1.4+
    build = Mix.Project.build_path()
    for %{app: app} <- Mix.Dep.Umbrella.unloaded do
      Path.join([build, "lib", Atom.to_string(app), "ebin"])
    end
  end

  # `:main` may be a module atom (stringified via inspect), a page name
  # string (kept as-is), or nil (removed so ExDoc applies its default).
  defp normalize_main(options) do
    main = options[:main]
    cond do
      is_nil(main) ->
        Keyword.delete(options, :main)
      is_atom(main) ->
        Keyword.put(options, :main, inspect(main))
      is_binary(main) ->
        options
    end
  end

  # Merges user-provided :deps over the auto-detected HexDocs URLs and
  # resolves each app to its loaded lib dir, dropping apps that aren't found.
  defp normalize_deps(options) do
    deps =
      if deps = options[:deps] do
        Keyword.merge(get_deps(), deps)
      else
        get_deps()
      end
    deps =
      for {app, doc} <- deps,
          lib_dir = :code.lib_dir(app),
          is_list(lib_dir),
          do: {List.to_string(lib_dir), doc}
    Keyword.put(options, :deps, deps)
  end

  # Default doc URLs: every dep with a loadable version is assumed to be on
  # hexdocs.pm at its exact version.
  defp get_deps do
    for {key, _} <- Mix.Project.deps_paths,
        _ = Application.load(key), # :ok | {:error, _}
        vsn = Application.spec(key, :vsn) do
      {key, "https://hexdocs.pm/#{key}/#{vsn}/"}
    end
  end
end
|
lib/mix/tasks/docs.ex
| 0.817429
| 0.477554
|
docs.ex
|
starcoder
|
defmodule Schocken.Game.Player do
  @moduledoc false
  alias __MODULE__
  alias Schocken.Game.Ranking

  defstruct(
    name: "",
    current_toss: %{
      dices: nil,
      dices_out: nil,
      one_toss: nil,
      promote: :zero,
      tries: 0,
      score: nil
    },
    first_player: false,
    num_coaster: 0,
    state: :ready,
    lost_half: false
  )

  @type t :: %Player{
          name: String.t(),
          current_toss: current_toss,
          first_player: boolean,
          num_coaster: integer,
          state: :ready | :finished | :out,
          lost_half: boolean
        }
  @type dice :: 1..6
  @type tries :: 0..3
  @type score :: {integer, integer, integer}
  @type current_toss :: %{
          dices: [dice],
          dices_out: [dice],
          one_toss: boolean,
          promote: :zero | :one | :two,
          tries: tries,
          score: score
        }

  @doc """
  Returns a new player struct with an initial roll of all three dice.
  """
  @spec new(String.t()) :: t
  def new(name) do
    %Player{name: name}
    |> roll_dices(:all)
  end

  @doc """
  roll choices
  update tries, one_toss
  call Ranking.evaluate
  return updated player
  """
  # Spec fixed: the original declared (Player, List | :all | 1..6) :: Player,
  # which are improper typespec forms; `choices` is a list of dice values,
  # :all, or a single dice value.
  @spec roll_dices(t, [dice] | :all | dice) :: t
  def roll_dices(%Player{current_toss: current_toss} = player, choices) do
    current_toss =
      current_toss
      |> update_dices_out(choices)
      |> do_roll_dices(choices)
      |> update_tries()
      |> Ranking.evaluate()
    %Player{player | current_toss: current_toss}
  end

  @doc """
  Update the coaster count of the player: positive `number` adds, negative
  subtracts. The count never drops below zero.
  """
  # Spec fixed: the original declared {:ok | :out, t}, but the function has
  # always returned a bare player struct.
  @spec update_coaster(t, integer) :: t
  def update_coaster(%Player{num_coaster: num_coaster} = player, number) do
    # Equivalent to the original cond (new value when positive, else 0).
    %Player{player | num_coaster: max(num_coaster + number, 0)}
  end

  # Moves the chosen dice from the live set into `dices_out` so only the
  # remaining dice are rerolled.
  @spec update_dices_out(current_toss, [dice] | :all | dice) :: current_toss
  defp update_dices_out(current_toss, choices) do
    dices = current_toss.dices
    dices_out = cond do
      is_number(choices) -> dices -- [choices]
      is_list(choices) -> dices -- choices
      true -> []
    end
    %{current_toss | dices_out: dices_out}
  end

  # Rerolls the selected dice; choosing all three (explicitly or via :all)
  # counts as a fresh one-toss roll.
  @spec do_roll_dices(current_toss, [dice] | :all | dice) :: current_toss
  defp do_roll_dices(_current_toss, _choices)

  defp do_roll_dices(current_toss, choice) when is_number(choice) do
    current_toss
    |> reroll_dice(choice)
    |> Map.put(:one_toss, false)
  end

  defp do_roll_dices(current_toss, choices) when length(choices) == 2 do
    # A six kept out of the reroll must not count toward the promote level.
    six_is_out? = (Enum.count(current_toss.dices, &(&1 == 6)) - Enum.count(choices, &(&1 == 6))) == 1
    Enum.reduce(choices, current_toss, fn choice, toss ->
      reroll_dice(toss, choice)
    end)
    |> Map.put(:one_toss, false)
    |> update_promote(six_is_out?)
  end

  defp do_roll_dices(current_toss, choices) when length(choices) == 3 do
    do_roll_dices(current_toss, :all)
  end

  defp do_roll_dices(current_toss, :all) do
    %{current_toss | dices: [toss(), toss(), toss()], one_toss: true}
    |> update_promote(false) # I know there is a cleaner way, but it works...
  end

  # Replaces the first die matching `choice` with a fresh roll.
  @spec reroll_dice(current_toss, 1..6) :: current_toss
  defp reroll_dice(%{dices: dices} = current_toss, choice) do
    index = Enum.find_index(dices, fn dice -> dice == choice end)
    %{current_toss | dices: List.replace_at(dices, index, toss())}
  end

  @spec update_tries(current_toss) :: current_toss
  defp update_tries(%{tries: tries} = current_toss) do
    %{current_toss | tries: tries + 1}
  end

  # Promote level mirrors the number of sixes showing (minus one that was
  # deliberately kept out, if any).
  @spec update_promote(current_toss, boolean) :: current_toss
  defp update_promote(current_toss, six_is_out?) do
    number_of_sixes = Enum.count(current_toss.dices, &(&1 == 6)) - if six_is_out?, do: 1, else: 0
    case number_of_sixes do
      3 ->
        %{current_toss | promote: :two}
      2 ->
        %{current_toss | promote: :one}
      _ ->
        %{current_toss | promote: :zero}
    end
  end

  @spec toss() :: 1..6
  defp toss(), do: Enum.random(1..6)
end
|
lib/schocken/game/player.ex
| 0.789437
| 0.416559
|
player.ex
|
starcoder
|
defmodule Patch.Listener do
  @moduledoc """
  A proxy process that sits in front of a target process and forwards every
  call, cast, and message to it, while also sending a tagged copy of each
  interaction to a recipient process for inspection.
  """
  use GenServer

  @default_capture_replies true
  @default_timeout 5000

  @typedoc """
  Listeners are started with a tag so the listening process can differentiate
  between multiple listeners.
  """
  @type tag :: atom()
  @typedoc """
  Listeners listen to a target.
  """
  @type target :: GenServer.server()
  @typedoc """
  Option to control whether or not to capture GenServer.call replies.
  Defaults to #{@default_capture_replies}
  """
  @type capture_replies_option :: {:capture_replies, boolean()}
  @typedoc """
  Option to control how long the listener should wait for GenServer.call
  Value is either the number of milliseconds to wait or the `:infinity` atom.
  If `capture_replies` is set to false this setting has no effect.
  Defaults to #{@default_timeout}
  """
  @type timeout_option :: {:timeout, timeout()}
  @typedoc """
  Sum-type of all valid options
  """
  @type option :: capture_replies_option() | timeout_option()

  @type t :: %__MODULE__{
          capture_replies: boolean(),
          recipient: pid(),
          tag: atom(),
          target: pid(),
          timeout: timeout()
        }
  defstruct [:capture_replies, :recipient, :tag, :target, :timeout]

  ## Client

  def child_spec(args) do
    recipient = Keyword.fetch!(args, :recipient)
    tag = Keyword.fetch!(args, :tag)
    target = Keyword.fetch!(args, :target)
    options = Keyword.get(args, :options, [])
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [recipient, tag, target, options]},
      restart: :temporary
    }
  end

  # NOTE(review): this spec types `recipient` as atom(), but the struct type
  # above declares `recipient: pid()` and the handlers only `send/2` to it —
  # confirm which is intended.
  @spec start_link(recipient :: atom(), tag :: tag(), target :: pid() | atom(), [option()]) ::
          {:ok, pid()} | {:error, :not_found}
  def start_link(recipient, tag, target, options \\ [])

  # Named target: steal the registered name so the listener transparently
  # intercepts everything addressed to it, then forward to the real pid.
  def start_link(recipient, tag, target, options) when is_atom(target) do
    case Process.whereis(target) do
      nil ->
        {:error, :not_found}
      pid ->
        true = Process.unregister(target)
        {:ok, listener} = start_link(recipient, tag, pid, options)
        Process.register(listener, target)
        {:ok, listener}
    end
  end

  def start_link(recipient, tag, target, options) when is_pid(target) do
    capture_replies = Keyword.get(options, :capture_replies, @default_capture_replies)
    timeout = Keyword.get(options, :timeout, @default_timeout)
    state = %__MODULE__{
      capture_replies: capture_replies,
      recipient: recipient,
      tag: tag,
      target: target,
      timeout: timeout
    }
    GenServer.start_link(__MODULE__, state)
  end

  # Returns the pid the given listener is proxying for.
  def target(listener) do
    GenServer.call(listener, {__MODULE__, :target})
  end

  ## Server

  @spec init(t()) :: {:ok, t()}
  def init(%__MODULE__{} = state) do
    # Monitor the target so the listener can report and stop when it dies.
    Process.monitor(state.target)
    {:ok, state}
  end

  def handle_call({__MODULE__, :target}, _from, state) do
    {:reply, state.target, state}
  end

  # capture_replies: false — relay the raw :"$gen_call" so the target replies
  # directly to the original caller; the listener never sees the reply.
  def handle_call(message, from, %__MODULE__{capture_replies: false} = state) do
    send(state.recipient, {state.tag, {GenServer, :call, message, from}})
    send(state.target, {:"$gen_call", from, message})
    {:noreply, state}
  end

  # capture_replies: true — call the target ourselves so the reply can also
  # be copied to the recipient; exits from the target are mirrored by exiting
  # with the same reason.
  def handle_call(message, from, state) do
    send(state.recipient, {state.tag, {GenServer, :call, message, from}})
    try do
      response = GenServer.call(state.target, message, state.timeout)
      send(state.recipient, {state.tag, {GenServer, :reply, response, from}})
      {:reply, response, state}
    catch
      :exit, {reason, _call} ->
        send(state.recipient, {state.tag, {:EXIT, reason}})
        Process.exit(self(), reason)
    end
  end

  def handle_cast(message, state) do
    send(state.recipient, {state.tag, {GenServer, :cast, message}})
    GenServer.cast(state.target, message)
    {:noreply, state}
  end

  # Target died: notify the recipient and stop with a shutdown reason so the
  # (temporary) listener isn't restarted.
  def handle_info({:DOWN, _, :process, pid, reason}, %__MODULE__{target: pid} = state) do
    send(state.recipient, {state.tag, {:DOWN, reason}})
    {:stop, {:shutdown, {:DOWN, reason}}, state}
  end

  # Any other message is copied to the recipient and forwarded verbatim.
  def handle_info(message, state) do
    send(state.recipient, {state.tag, message})
    send(state.target, message)
    {:noreply, state}
  end
end
|
lib/patch/listener.ex
| 0.775647
| 0.41567
|
listener.ex
|
starcoder
|
defmodule Day13 do
  @moduledoc """
  Documentation for Day13.
  """

  # Part 1: find the earliest bus departing at or after the start timestamp.
  def part1 do
    {start, buses} = read_data("input.txt")
    {departure_time, departing_buses} =
      next_departure(start, buses |> Enum.filter(fn bus -> bus != -1 end))
    IO.puts("Next departure time: #{departure_time} buses #{Enum.join(departing_buses, ",")}")
    IO.puts("#{departure_time - start} minutes from now")
  end

  # Part 2: find the first timestamp where each bus departs at its offset.
  def part2 do
    read_data("input.txt")
    |> elem(1)
    |> find_sequenced_departures_2()
    |> IO.puts()
  end

  # Parses the input file: first line is the earliest departure timestamp,
  # second line is a comma-separated bus list where "x" entries (anything
  # non-numeric) become -1 placeholders.
  def read_data(filename) do
    lines = File.stream!(filename) |> Stream.map(&String.trim/1) |> Enum.to_list()
    {lines |> List.first() |> String.to_integer(),
     lines
     |> List.last()
     |> String.split(",")
     |> Enum.map(fn id ->
       case Integer.parse(id) do
         {int, _bin} -> int
         :error -> -1
       end
     end)}
  end

  # Boolean list: whether each bus departs exactly at `time`.
  def departures_at(time, buses) do
    buses |> Enum.map(fn bus -> rem(time, bus) == 0 end)
  end

  # Scans forward minute by minute for the first timestamp with any departure.
  def find_next_departure(time, buses) do
    Stream.iterate(time, &(&1 + 1))
    |> Stream.map(fn timeslot ->
      {timeslot, departures_at(timeslot, buses)}
    end)
    |> Stream.filter(fn {_ts, departures} ->
      Enum.any?(departures)
    end)
    |> Stream.take(1)
    |> Enum.to_list()
    |> List.first()
  end

  # Returns {first departure time at/after `time`, buses departing then}.
  def next_departure(time, buses) do
    {time, departing} = find_next_departure(time, buses)
    {time,
     departing
     |> Enum.zip(buses)
     |> Enum.filter(fn {departs, _bus} -> departs end)
     |> Enum.map(fn x -> elem(x, 1) end)}
  end

  # Boolean list: whether each {bus, offset} departs at time + offset.
  def sequenced_departures(time, buses) do
    buses
    |> Enum.map(fn
      {bus, offset} -> rem(time + offset, bus) == 0
    end)
  end

  # Brute-force variant of part 2: steps by the first bus's period and checks
  # all buses at once. Correct but far too slow for real input (superseded by
  # find_sequenced_departures_2/1).
  def find_sequenced_departures(buses) do
    first_bus = Enum.at(buses, 0)
    buses_idx = filter_and_add_offsets(buses)
    Stream.iterate(first_bus, fn time -> time + first_bus end)
    |> Stream.map(fn time -> {time, sequenced_departures(time, buses_idx)} end)
    |> Stream.filter(fn {_t, depart_list} -> depart_list |> Enum.all?() end)
    |> Stream.take(1)
    |> Enum.to_list()
    |> List.first()
    |> elem(0)
  end

  # Pairs each bus with its positional offset and drops the -1 placeholders.
  def filter_and_add_offsets(buses) do
    buses |> Enum.with_index() |> Enum.filter(fn {x, _} -> x != -1 end)
  end

  # Incremental sieve: finds a time satisfying the first `bus_count` buses,
  # then recurses with one more bus, stepping by the LCM of the already
  # satisfied periods so their alignment is preserved (Chinese-remainder
  # style search).
  def search(indexed_buses, start_time, skip_factor, bus_count) do
    rendezvous =
      Stream.iterate(start_time, fn time -> time + skip_factor end)
      |> Stream.map(fn time ->
        {time, sequenced_departures(time, indexed_buses |> Enum.slice(0, bus_count))}
      end)
      |> Stream.filter(fn {_t, depart_list} -> depart_list |> Enum.all?() end)
      |> Stream.take(1)
      |> Enum.to_list()
      |> List.first()
      |> elem(0)
    if bus_count == length(indexed_buses) do
      rendezvous
    else
      search(
        indexed_buses,
        rendezvous,
        BasicMath.lcm(
          indexed_buses
          |> Enum.slice(0, bus_count)
          |> Enum.map(fn b -> elem(b, 0) end)
        ),
        bus_count + 1
      )
    end
  end

  # Part 2 entry point: seed the sieve with the first bus's period.
  def find_sequenced_departures_2(buses) do
    first_bus = Enum.at(buses, 0)
    filter_and_add_offsets(buses) |> search(first_bus, first_bus, 2)
  end
end
|
day13/lib/day13.ex
| 0.575946
| 0.421016
|
day13.ex
|
starcoder
|
defmodule Day4 do
  @moduledoc """
  Advent of Code 2018, day 4: repose record.

  Parses chronologically sorted guard logs into per-guard lists of minutes
  spent asleep, then scores a chosen guard as `guard_id * best_minute`.
  """

  @doc "Runs both parts against the shared input file, printing answers and timings."
  def solve do
    input = prepare_input()

    # The original wrapped the unit in `unquote/1`, which is only valid inside
    # `quote` and fails to compile here. `:millisecond` is the plain runtime
    # unit (`:milli_seconds` is its deprecated spelling).
    start = System.monotonic_time(:millisecond)
    IO.puts("Part one answer:")

    input
    |> Day4.Part1.solve()
    |> IO.inspect()

    time_part_one = System.monotonic_time(:millisecond) - start
    IO.puts("Part one took #{time_part_one} milliseconds")
    start = System.monotonic_time(:millisecond)
    IO.puts("Part two answer:")

    input
    |> Day4.Part2.solve()
    |> IO.puts()

    time_part_two = System.monotonic_time(:millisecond) - start
    IO.puts("Part two took #{time_part_two} milliseconds")
    IO.puts("Total run time #{time_part_one + time_part_two} milliseconds")
  end

  # Reads the puzzle input as a list of trimmed lines.
  defp prepare_input do
    "../../inputFiles/day4/input.txt"
    |> File.stream!()
    |> Stream.map(&String.trim_trailing/1)
    |> Enum.to_list()
  end

  @doc """
  Parses log lines (e.g. `"[1518-11-01 00:05] falls asleep"`) into a list of
  `{guard_id, minutes}` tuples, where `minutes` holds every minute the guard
  was asleep, repeated across nights.

  The reducer threads two bookkeeping keys (`:current_guard` and
  `:current_start`) through the accumulator; they are stripped before return.
  """
  def parse_inputs(input) do
    input
    |> Enum.sort()
    |> Enum.map(fn line ->
      [time, action] =
        line
        |> String.trim_leading("[")
        |> String.split("] ")

      %{time: time, action: action}
    end)
    |> Enum.reduce(%{}, fn entry, acc ->
      [first, second | _] = String.split(entry[:action], " ")

      case first do
        # A new guard takes over the shift.
        "Guard" ->
          guard_id =
            second
            |> String.trim_leading("#")
            |> String.to_integer()

          acc
          |> Map.put_new(guard_id, [])
          |> Map.put(:current_guard, guard_id)

        # Start counting minutes
        "falls" ->
          start_minute =
            entry[:time]
            |> String.split(":")
            |> List.last()
            |> String.to_integer()

          Map.put(acc, :current_start, start_minute)

        # Stop counting minutes
        "wakes" ->
          start_minute = acc[:current_start]

          end_minute =
            entry[:time]
            |> String.split(":")
            |> List.last()
            |> String.to_integer()

          # Asleep during [start, end) — the wake-up minute itself is awake.
          minutes = Enum.reduce(start_minute..(end_minute - 1), [], fn m, ms -> [m | ms] end)
          current_list = Map.get(acc, acc[:current_guard])
          Map.put(acc, acc[:current_guard], List.flatten([minutes | current_list]))
      end
    end)
    |> Enum.filter(fn {key, _} ->
      key not in [:current_guard, :current_start]
    end)
  end

  @doc """
  Reduces the parsed `{guard_id, minutes}` list to `guard_id * minute` for the
  guard selected by `pick_best` — a 2-arity function receiving the accumulator
  (whose `:chosen_guard` holds the current best entry) and the next entry.
  """
  def choose_guard(input, pick_best) do
    input
    |> Enum.reduce(%{chosen_guard: nil}, fn entry, acc ->
      case acc[:chosen_guard] do
        nil -> Map.put(acc, :chosen_guard, entry)
        _ -> Map.put(acc, :chosen_guard, pick_best.(acc, entry))
      end
    end)
    |> Enum.reduce(0, fn {:chosen_guard, {guard_id, minutes}}, _ ->
      {minute, _occurrences} = best_minutes(minutes)
      guard_id * minute
    end)
  end

  @doc """
  Returns `{minute, occurrence_list}` for the most frequently occurring minute
  (the list repeats the minute once per occurrence); `nil` for an empty list.
  """
  def best_minutes(minutes) do
    minutes
    |> Enum.group_by(fn minute -> minute end)
    |> Enum.sort(fn {_, value1}, {_, value2} -> Enum.count(value1) >= Enum.count(value2) end)
    |> List.first()
  end
end
defmodule Day4.Part1 do
  @moduledoc "Strategy 1: pick the guard who is asleep for the most total minutes."

  @doc "Answers part 1: chosen guard's id times their most-slept minute."
  def solve(input) do
    input
    |> Day4.parse_inputs()
    |> Day4.choose_guard(&pick_best/2)
  end

  # Keeps whichever guard has the larger total count of slept minutes.
  defp pick_best(previous, current) do
    {_id, prev_minutes} = previous[:chosen_guard]
    {_id, cur_minutes} = current

    case Enum.count(prev_minutes) > Enum.count(cur_minutes) do
      true -> previous[:chosen_guard]
      false -> current
    end
  end
end
defmodule Day4.Part2 do
  @moduledoc "Strategy 2: pick the guard most frequently asleep on the same minute."

  @doc "Answers part 2: chosen guard's id times their most-slept minute."
  def solve(input) do
    input
    |> Day4.parse_inputs()
    |> Day4.choose_guard(&pick_best/2)
  end

  # Keeps whichever guard's single best minute has the higher occurrence count.
  defp pick_best(previous, current) do
    {_id, prev_minutes} = previous[:chosen_guard]
    {_id, cur_minutes} = current

    if occurrences(prev_minutes) > occurrences(cur_minutes) do
      previous[:chosen_guard]
    else
      current
    end
  end

  # Number of times the guard's most-slept minute occurs; 0 when there is none.
  defp occurrences(minutes) do
    case Day4.best_minutes(minutes) do
      {_minute, values} -> Enum.count(values)
      _ -> 0
    end
  end
end
|
elixir/day4/lib/day4.ex
| 0.553023
| 0.519826
|
day4.ex
|
starcoder
|
defmodule Kitt.Message.BSM do
  @moduledoc """
  Defines the structure and instantiation function
  for creating a J2735-compliant BasicSafetyMessage
  A `BSM` defines the basic interchange of presence data
  between DSRC-capable vehicles and infrastructure
  """

  defmodule CoreData do
    @moduledoc """
    Defines the structure and instantiation function
    for creating a J2735-compliant BSMCoreData data element
    """

    @typedoc "Defines the structure of a BSMCoreData data element and the data elements comprising its fields"
    @type t :: %__MODULE__{
            msgCnt: non_neg_integer(),
            id: non_neg_integer(),
            secMark: non_neg_integer(),
            lat: integer(),
            long: integer(),
            elev: integer(),
            accuracy: Kitt.Types.positional_accuracy(),
            transmission: Kitt.Types.transmission_state(),
            speed: non_neg_integer(),
            heading: non_neg_integer(),
            angle: integer(),
            accelSet: Kitt.Types.acceleration_set_4_way(),
            brakes: Kitt.Types.brake_system_status(),
            size: Kitt.Types.vehicle_size()
          }

    @derive Jason.Encoder
    @enforce_keys [
      :msgCnt,
      :id,
      :secMark,
      :lat,
      :long,
      :elev,
      :accuracy,
      :transmission,
      :speed,
      :heading,
      :angle,
      :accelSet,
      :brakes,
      :size
    ]
    defstruct [
      :msgCnt,
      :id,
      :secMark,
      :lat,
      :long,
      :elev,
      :accuracy,
      :transmission,
      :speed,
      :heading,
      :angle,
      :accelSet,
      :brakes,
      :size
    ]

    @doc """
    Produces a `CoreData` message struct from an equivalent map or keyword input.
    """
    @spec new(map() | keyword()) :: CoreData.t()
    def new(core_data), do: struct(__MODULE__, core_data)
  end

  @typedoc "Defines the BasicSafetyMessage type and the data elements comprising its component fields"
  @type t :: %__MODULE__{
          coreData: CoreData.t(),
          partII: [partIIcontent()],
          regional: [map()]
        }

  @type partIIcontent :: %{
          "partII-Id": non_neg_integer(),
          "partII-Value": partIIextension()
        }

  @type vehicle_safety_extensions :: %{
          events: Kitt.Types.event_flag(),
          pathHistory: Kitt.Types.path_history(),
          pathPrediction: Kitt.Types.path_prediction(),
          lights: lights_state()
        }

  @type lights_state ::
          :lowBeamHeadlightsOn
          | :highBeamHeadlightsOn
          | :leftTurnSignalOn
          | :rightTurnSignalOn
          | :hazardSignalOn
          | :automaticLightControlOn
          | :daytimeRunningLightsOn
          | :fogLightOn
          | :parkingLightsOn

  @type special_vehicle_extensions :: %{
          vehicleAlerts: Kitt.Types.emergency_details(),
          description: Kitt.Types.event_description(),
          trailers: Kitt.Types.trailer_data()
        }

  @type supplemental_vehicle_extensions :: %{
          classification: non_neg_integer(),
          classDetails: Kitt.Types.vehicle_classification(),
          vehicleData: Kitt.Types.vehicle_data(),
          weatherReport: Kitt.Types.weather_report(),
          weatherProbe: Kitt.Types.weather_probe(),
          obstacle: Kitt.Types.obstacle_detection(),
          status: Kitt.Types.disabled_vehicle(),
          speedProfile: Kitt.Types.speed_profile(),
          theRTCM: Kitt.Types.rtcm_package(),
          regional: [map()]
        }

  @type partIIextension ::
          vehicle_safety_extensions()
          | special_vehicle_extensions()
          | supplemental_vehicle_extensions()

  @derive Jason.Encoder
  @enforce_keys [:coreData]
  defstruct [:coreData, :partII, :regional]

  @doc """
  Produces a `BSM` message struct from an equivalent map or keyword input.
  The `coreData` primary field is instantiated as a `CoreData` struct recursively
  """
  @spec new(map() | keyword()) :: t()
  def new(message) do
    # Normalize to a map first: the spec allows keyword-list input, but
    # Map.get_and_update!/3 raises BadMapError on a keyword list. Map.new/1
    # is a no-op for maps, so map callers are unaffected.
    {_, with_core_struct} =
      message
      |> Map.new()
      |> Map.get_and_update!(:coreData, fn core_data ->
        {core_data, CoreData.new(core_data)}
      end)

    struct(__MODULE__, with_core_struct)
  end

  @doc """
  Returns the `BSM` identifying integer
  """
  @spec type_id() :: non_neg_integer()
  def type_id(), do: :DSRC.basicSafetyMessage()

  @doc """
  Returns the `BSM` identifying atom recognized by the ASN1 spec
  """
  @spec type() :: atom()
  def type(), do: :BasicSafetyMessage
end
|
lib/kitt/message/bsm.ex
| 0.823293
| 0.671065
|
bsm.ex
|
starcoder
|
defmodule BubbleMatch.Token do
  @moduledoc """
  A token is a single word or a part of the sentence. A sentence is a sequence of tokens.
  Each token contains information and metadata that is used to match
  sentences on, and to extract information from.
  """

  @typedoc """
  Tokens contain the following fields:
  * `raw` - the raw text value of the token, including any surrounding
  whitespace.
  * `value` - the normalized value of the token. In the case of word
  tokens, this is usually the normalized, lowercased version of the
  word. In the case of entities, this value holds a map with keys
  `kind`, `provider` and `value`.
  * `start` - the start index; where in the original sentence the
  token starts.
  * `end` - the end index; where in the original sentence the
  token ends.
  * `index` - the (zero-based) token index number; 0 if it's the first
  token, 1 if it's the second, etc.
  * `type` - the type of the token; an atom, holding either `:entity`,
  `:spacy`, `:naive`, depending on the way the token was
  originally created.
  """
  @type t :: %__MODULE__{}

  # `end` is a reserved word in Elixir but is valid as a keyword-list key
  # (`end: nil`), so it works as a struct field here.
  use BubbleLib.DslStruct,
    raw: nil,
    value: nil,
    start: nil,
    end: nil,
    type: nil,
    index: nil

  alias BubbleMatch.{Entity, Unidekode}
  alias __MODULE__, as: M

  # Compiled at module load; matches a string that is exactly one emoji.
  @emoji Unicode.Regex.compile!("^[[:Emoji:]]$")

  @doc """
  Given a single token in Spacy's JSON format, convert it into a token.
  """
  @spec from_spacy(spacy_json_token :: map()) :: t()
  def from_spacy(t) do
    # The Spacy JSON uses string keys; ~w(...) produces string keys here, and
    # each kept value is ASCII-folded.
    value =
      Map.take(t, ~w(lemma pos norm tag))
      |> Enum.map(fn {k, v} -> {k, Unidekode.to_ascii(v)} end)
      |> Map.new()

    # Single-emoji tokens get a synthetic "EMOJI" part-of-speech tag and keep
    # the original emoji character under "emoji".
    value =
      if Regex.match?(@emoji, t["string"]) do
        value
        |> Map.put("pos", "EMOJI")
        |> Map.put("emoji", t["string"])
      else
        value
      end

    %M{
      type: :spacy,
      value: value,
      raw: t["string"],
      index: t["id"],
      start: t["start"],
      end: t["end"]
    }
  end

  @doc """
  Test whether a token is punctuation
  """
  def punct?(%M{type: :punct}) do
    true
  end

  # Emoji and symbols are treated as punctuation as well.
  def punct?(token) do
    pos?(token, "PUNCT") || pos?(token, "SYM") || pos?(token, "EMOJI")
  end

  @doc """
  Test whether a token matches the given POS (part-of-speech) tag.
  """
  def pos?(%M{type: :spacy, value: %{"pos" => tag}}, tag) do
    true
  end

  # Also accepts a match on the fine-grained "tag" field, not just "pos".
  def pos?(%M{type: :spacy, value: %{"tag" => tag}}, tag) do
    true
  end

  def pos?(_, _) do
    false
  end

  @doc """
  Test whether a token matches the given (optionally normalized) word.
  """
  def word?(%M{type: :spacy} = t, word) do
    t.value["norm"] == word || t.value["lemma"] == word
  end

  # Non-spacy tokens compare against the normalized value or the raw text.
  def word?(%M{} = t, word) do
    t.value == word || t.raw == word
  end

  @doc """
  Test whether a token is an entity of the given kind.
  """
  def entity?(%M{} = t, kind) do
    t.type == :entity and t.value.kind == kind
  end

  @doc """
  Constructs a token from a Spacy entity definition
  """
  def from_spacy_entity(spacy_entity_json, sentence_text) do
    # NOTE(review): assumes Spacy's start/end offsets align with
    # String.slice/3 grapheme offsets — confirm for non-ASCII sentences.
    {start, end_} = {spacy_entity_json["start"], spacy_entity_json["end"]}
    raw = String.slice(sentence_text, start, end_ - start)
    entity = Entity.new("spacy", Inflex.underscore(spacy_entity_json["label"]), raw, raw)

    %M{
      type: :entity,
      value: entity,
      start: start,
      end: end_,
      raw: raw
    }
  end

  @doc """
  Constructs a token from a Duckling entity definition
  """
  def from_duckling_entity(duckling_entity) do
    {start, end_} = {duckling_entity["start"], duckling_entity["end"]}
    value = duckling_entity["value"]["value"]
    raw = duckling_entity["body"]
    # Everything in "value" except the value itself travels along as extra
    # metadata on the entity.
    extra = duckling_entity["value"] |> Map.delete("value")
    entity = Entity.new("duckling", Inflex.underscore(duckling_entity["dim"]), value, raw, extra)

    %M{
      type: :entity,
      value: entity,
      start: start,
      end: end_,
      raw: duckling_entity["body"]
    }
  end

  @doc """
  Get the base form of the given string; the downcased, ASCII version.
  """
  def base_form(str) do
    # Curly apostrophes are normalized to the plain ASCII apostrophe.
    str
    |> String.trim()
    |> String.downcase()
    |> Unidekode.drop_accented()
    |> String.replace("’", "'")
    |> String.replace("ʼ", "'")
  end
end
defimpl String.Chars, for: BubbleMatch.Token do
  # A token's string form is its raw surface text (surrounding whitespace included).
  def to_string(%BubbleMatch.Token{} = token), do: token.raw
end
# jason_derive/1 is a macro (hence the `require`); presumably it derives JSON
# encoding for the token struct — confirm in BubbleLib.DslStruct.
require BubbleLib.DslStruct
BubbleLib.DslStruct.jason_derive(BubbleMatch.Token)
|
lib/bubble_match/token.ex
| 0.877424
| 0.763528
|
token.ex
|
starcoder
|
defmodule Noizu.DomainObject do
  # Each macro in this module is a thin façade: it forwards the caller's
  # environment (__CALLER__) and options to an internal scaffolding
  # implementation module. The commented-out Macro.expand calls are kept as
  # breadcrumbs — options are deliberately passed through unexpanded.

  @doc """
  Setup Base Domain Object, this struct will in turn hold Entity, Repo, Index, etc.
  ## See
  - `Noizu.AdvancedScaffolding.Internal.Core.Base.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.Persistence.Base.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.EntityIndex.Base.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.Json.Base.Behaviour`
  ## Example
  ```elixir
  defmodule User do
    use Noizu.DomainObject
    Noizu.DomainObject.noizu_entity() do
      public_field :name
    end
  end
  ```
  """
  defmacro __using__(options \\ nil) do
    #options = Macro.expand(options, __ENV__)
    quote do
      use Noizu.AdvancedScaffolding.Internal.DomainObject.Base, unquote(options)
    end
  end

  #--------------------------------------------
  # noizu_entity
  #--------------------------------------------
  @doc """
  Initialize a DomainObject.Entity. Caller passes in identifier and field definitions which are in turn used to generate the domain object entity's configuration options and defstruct statement.
  ## See
  - `Noizu.AdvancedScaffolding.Internal.Core.Entity.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.Persistence.Entity.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.EntityIndex.Entity.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.Index.Behaviour`
  - `Noizu.AdvancedScaffolding.Internal.Json.Entity.Behaviour`
  ## Example
  ```elixir
  defmodule User do
    use Noizu.DomainObject
    Noizu.DomainObject.noizu_entity() do
      public_field :name
    end
  end
  ```
  """
  defmacro noizu_entity(options \\ [], [do: block]) do
    #options = Macro.expand(options, __ENV__)
    Noizu.AdvancedScaffolding.Internal.DomainObject.Entity.__noizu_entity__(__CALLER__, options, block)
  end

  #--------------------------------------------
  # noizu_table
  #--------------------------------------------
  @doc """
  Inject Scaffolding fields into a Ecto.Table entity.
  """
  defmacro noizu_table(options \\ []) do
    #options = Macro.expand(options, __ENV__)
    Noizu.AdvancedScaffolding.Internal.DomainObject.Table.__noizu_table__(__CALLER__, options)
  end

  #--------------------------------------------
  # noizu_schema_info
  #--------------------------------------------
  @doc """
  Configure your DomainObject Schema module. Provides runtime compiled list of modules, sref mapping/Noizu.ERP String.t support, etc.
  """
  defmacro noizu_schema_info(options \\ [], [do: block]) do
    #options = Macro.expand(options, __ENV__)
    Noizu.DomainObject.SchemaInfo.__noizu_schema_info__(__CALLER__, options, block)
  end

  #--------------------------------------------
  # noizu_type_handler
  #--------------------------------------------
  @doc """
  Type Handler Behavior. Used for casting/loading embedded fields into their persistence layer format.
  For example domain objects may include a TimeStamp field.
  ```elixir
  defmodule Entity do
    @universal_identifier true
    Noizu.DomainObject.noizu_entity do
      @index true
      public_field :my_image_update, nil, Noizu.Scaffolding.V3.TimeStamp.TypeHandler
    end
  end
  ```
  Where the time stamp field contains a created_on, modified_on, deleted_on field. When casting to an Ecto database the nested structure can be replaced
  with `my_image_update_created_on` ,`my_image_update_modified_on` ,and `my_image_update_deleted_on` which would match DateTime fields in our Ecto Table schema.
  ```elixir
  defmodule Noizu.DomainObject.TimeStamp.Second do
    use Noizu.SimpleObject
    @vsn 1.0
    Noizu.SimpleObject.noizu_struct() do
      date_time_handler = Application.get_env(:noizu_advanced_scaffolding, :data_time_handler, Noizu.DomainObject.DateTime.Second.TypeHandler)
      public_field :created_on, nil, date_time_handler
      public_field :modified_on, nil, date_time_handler
      public_field :deleted_on, nil, date_time_handler
    end
    #...
  ```
  """
  defmacro noizu_type_handler(options \\ []) do
    #options = Macro.expand(options, __ENV__)
    Noizu.DomainObject.TypeHandler.__noizu_type_handler__(__CALLER__, options)
  end

  #--------------------------------------------
  # noizu_sphinx_handler
  #--------------------------------------------
  @doc """
  Similar to type handler, but responsible for casting fields to a sphinx index record.
  In addition the field expansion support like in our type handler behaviour it also provides default values, field type (:attr_unit, :attr_multi_64, :field), and bit width (for int fields).
  """
  defmacro noizu_sphinx_handler(options \\ []) do
    #options = case options do
    #  [] -> []
    #  _ -> Macro.expand(options, __ENV__)
    #  end
    Noizu.DomainObject.SearchIndexHandler.__noizu_sphinx_handler__(__CALLER__, options)
  end

  #--------------------------------------------
  # noizu_index
  #--------------------------------------------
  @doc """
  Module for handling saving to/ updating/tracking and creating Sphinx record types.
  Provides methods for creating xml schema definitions, real time definitions, config snippets,
  internal book keeping (for tracking if a record is realtime, delta, primary index), etc.
  """
  defmacro noizu_index(options \\ [], [do: block]) do
    #options = Macro.expand(options, __ENV__)
    Noizu.AdvancedScaffolding.Internal.DomainObject.Index.__noizu_index__(__CALLER__, options, block)
  end

  #--------------------------------------------
  # noizu_repo
  #--------------------------------------------
  @doc """
  Provides scaffolding for a DomainObject.Repo module. If used with no options this behavior will provide everything needed for basic crud. get/cache/update/delete as well as
  providing by default a simple repo structure %Repo{ entities: [], length: 0} which may be used to pass round specific sets of records or as an embed option for domain objects
  provided a TypeHandler and optional SphinxHandler is provided.
  ## Example
  ```elixir
  defmodule MyApp.MyDomainObject do
    ...
    defmodule Repo do
      Noizu.DomainObject.noizu_repo do
      end
    end
    defmodule Repo.TypeHandler do
      require Noizu.DomainObject
      Noizu.DomainObject.noizu_type_handler()
    end
    def pre_create_callback(field, entity, context, options) do
      # a domain object included a Repo set of entities of type MyApp.DomainObject.Entity. From this callback we may write each of these to a 12m table for our entity.
      super(field, entity, context, options)
    end
  end
  ```
  """
  defmacro noizu_repo(options \\ [], [do: block]) do
    #options = Macro.expand(options, __ENV__)
    Noizu.AdvancedScaffolding.Internal.DomainObject.Repo.__noizu_repo__(__CALLER__, options, block)
  end
end
|
lib/scaffolding/domain_object.ex
| 0.682997
| 0.663399
|
domain_object.ex
|
starcoder
|
defmodule Scrivener.Headers do
  @moduledoc """
  Helpers for paginating API responses with [Scrivener](https://github.com/drewolson/scrivener) and HTTP headers. Implements [RFC-5988](https://mnot.github.io/I-D/rfc5988bis/), the proposed standard for Web linking.
  Use `paginate/2` to set the pagination headers:
      def index(conn, params) do
        page = MyApp.Person
               |> where([p], p.age > 30)
               |> order_by([p], desc: p.age)
               |> preload(:friends)
               |> MyApp.Repo.paginate(params)
        conn
        |> Scrivener.Headers.paginate(page)
        |> render("index.json", people: page.entries)
      end
  """

  import Plug.Conn, only: [put_resp_header: 3]

  @doc """
  Add HTTP headers for a `Scrivener.Page`.

  Header names are looked up in the `:scrivener_headers_json` application
  environment under `:link`, `:total`, `:per_page`, `:total_pages` and
  `:page_number`.
  """
  @spec paginate(Plug.Conn.t, Scrivener.Page.t) :: Plug.Conn.t
  def paginate(conn, page) do
    uri = %URI{
      scheme: Atom.to_string(conn.scheme),
      host: conn.host,
      port: conn.port,
      path: conn.request_path,
      query: conn.query_string
    }

    headers = [
      {:link, build_link_header(uri, page)},
      {:total, Integer.to_string(page.total_entries)},
      {:per_page, Integer.to_string(page.page_size)},
      {:total_pages, Integer.to_string(page.total_pages)},
      {:page_number, Integer.to_string(page.page_number)}
    ]

    Enum.reduce(headers, conn, fn {config_key, value}, acc ->
      put_resp_header(acc, Application.get_env(:scrivener_headers_json, config_key), value)
    end)
  end

  # Builds the link header as a JSON object of first/last/prev/next URLs.
  @spec build_link_header(URI.t, Scrivener.Page.t) :: String.t
  defp build_link_header(uri, page) do
    %{"first" => link_str(uri, 1), "last" => link_str(uri, page.total_pages)}
    |> maybe_add_prev(uri, page.page_number, page.total_pages)
    |> maybe_add_next(uri, page.page_number, page.total_pages)
    |> Poison.encode!()
  end

  # Rebuilds the request URI with its "page" query parameter replaced.
  defp link_str(%{query: req_query} = uri, page_number) do
    updated_query =
      req_query
      |> URI.decode_query()
      |> Map.put("page", page_number)
      |> URI.encode_query()

    URI.to_string(%URI{uri | query: updated_query})
  end

  # A "prev" link exists whenever we are past the first page.
  defp maybe_add_prev(links, uri, page_number, total_pages)
       when 1 < page_number and page_number <= total_pages do
    Map.put(links, "prev", link_str(uri, page_number - 1))
  end

  defp maybe_add_prev(links, _uri, _page_number, _total_pages), do: links

  # A "next" link exists whenever we are before the last page.
  defp maybe_add_next(links, uri, page_number, total_pages)
       when 1 <= page_number and page_number < total_pages do
    Map.put(links, "next", link_str(uri, page_number + 1))
  end

  defp maybe_add_next(links, _uri, _page_number, _total_pages), do: links
end
|
lib/scrivener/headers.ex
| 0.816443
| 0.416559
|
headers.ex
|
starcoder
|
defmodule Schedules.RepoCondensed do
  @moduledoc """
  An alternate way to fetch schedules that is more light weight and easier to cache.
  This uses a longer than usual timeout for initial caching as sometime (especially in dev)
  it may take a long time to warm the cache.
  """

  # Our own to_string/1 below replaces Kernel's for this module.
  import Kernel, except: [to_string: 1]

  # cache/3 used below is injected by this macro; entries live for an hour.
  use RepoCache, ttl: :timer.hours(1)

  alias Routes.Route
  alias Schedules.{Parser, Repo, ScheduleCondensed}
  alias Stops.Repo, as: StopsRepo
  alias V3Api.Schedules, as: SchedulesApi

  # the long timeout is to address a worst-case scenario of cold schedule cache
  @long_timeout 15_000

  @default_params [
    include: "trip",
    "fields[schedule]":
      "departure_time,arrival_time,drop_off_type,pickup_type,stop_sequence,timepoint",
    "fields[trip]": "name,headsign,direction_id,bikes_allowed"
  ]

  # Fetches condensed schedules for the given route ids (cached). The :date
  # option defaults to today's service date; :min_time, when given, filters
  # out earlier schedules after the cache lookup.
  @spec by_route_ids([Route.id_t()], Keyword.t()) :: [ScheduleCondensed.t()] | {:error, any}
  def by_route_ids(route_ids, opts \\ []) when is_list(route_ids) do
    opts = Keyword.put_new(opts, :date, Util.service_date())

    @default_params
    |> Keyword.put(:route, Enum.join(route_ids, ","))
    |> Keyword.put(:date, opts |> Keyword.fetch!(:date) |> to_string())
    |> add_optional_param(opts, :direction_id)
    |> add_optional_param(opts, :stop_sequences, :stop_sequence)
    |> add_optional_param(opts, :stop_ids, :stop)
    # NOTE(review): the moduledoc mentions a longer-than-usual timeout, but
    # this cache call uses 10s while @long_timeout (15s) only guards the
    # Task.await in build_structs/1 — confirm which the comment refers to.
    |> cache(&all_from_params/1, timeout: 10_000)
    |> filter_by_min_time(Keyword.get(opts, :min_time))
  end

  # Fetches schedules from the API, filters invalid items, caches their trips,
  # parses to 9-element records sorted by elem 3 (a DateTime — presumably the
  # departure time), and builds condensed structs. A non-%JsonApi{} result
  # (the error case) falls through the `with` unchanged.
  @spec all_from_params(Keyword.t()) :: [Parser.record()] | {:error, any}
  defp all_from_params(params) do
    with %JsonApi{data: data} <- SchedulesApi.all(params) do
      data = Enum.filter(data, &valid?/1)
      Repo.insert_trips_into_cache(data)

      data
      |> Stream.map(&Parser.parse/1)
      |> Enum.filter(&has_trip?/1)
      |> Enum.sort_by(&DateTime.to_unix(elem(&1, 3)))
      |> build_structs()
    end
  end

  # A parsed record's second element is its trip id; drop records without one.
  defp has_trip?({_, trip_id, _, _, _, _, _, _, _}) when is_nil(trip_id) do
    false
  end

  defp has_trip?({_, _, _, _, _, _, _, _, _}) do
    true
  end

  # Keep only API items whose first "trip" relationship has a non-nil id.
  defp valid?(%JsonApi.Item{relationships: %{"trip" => [%JsonApi.Item{id: id} | _]}})
       when not is_nil(id) do
    true
  end

  defp valid?(_) do
    false
  end

  # Copies opts[key] into params under param_name (defaulting to key),
  # stringified; a missing key leaves params untouched.
  defp add_optional_param(params, opts, key, param_name \\ nil) do
    param_name = param_name || key

    case Keyword.fetch(opts, key) do
      {:ok, value} ->
        Keyword.put(params, param_name, to_string(value))

      :error ->
        params
    end
  end

  # Local to_string/1 (Kernel's is excluded above) covering the value shapes
  # that can appear as API query parameters.
  defp to_string(%Date{} = date) do
    Util.convert_to_iso_format(date)
  end

  defp to_string(str) when is_binary(str) do
    str
  end

  defp to_string(atom) when is_atom(atom) do
    Atom.to_string(atom)
  end

  # Lists become comma-separated values, stringifying each element.
  defp to_string(list) when is_list(list) do
    list
    |> Enum.map(&to_string/1)
    |> Enum.join(",")
  end

  defp to_string(int) when is_integer(int) do
    Integer.to_string(int)
  end

  # Drops schedules departing before min_time; nil means no lower bound.
  @spec filter_by_min_time([ScheduleCondensed.t()], DateTime.t() | nil) :: [ScheduleCondensed.t()]
  defp filter_by_min_time(schedules, nil) do
    schedules
  end

  defp filter_by_min_time(schedules, %DateTime{} = min_time) do
    Enum.filter(schedules, fn schedule ->
      Util.time_is_greater_or_equal?(schedule.time, min_time)
    end)
  end

  # Resolves each record's trip and stop concurrently (one Task per record),
  # waiting up to @long_timeout per task to cover a cold trip/stop cache.
  defp build_structs(schedules) do
    schedules
    |> Enum.map(fn {_, trip_id, stop_id, time, _, _, _, stop_sequence, _} ->
      Task.async(fn ->
        trip = Repo.trip(trip_id)
        stop = StopsRepo.get!(stop_id)

        %ScheduleCondensed{
          time: time,
          trip_id: trip_id,
          headsign: trip.headsign,
          route_pattern_id: trip.route_pattern_id,
          stop_id: stop.parent_id || stop.id,
          train_number: trip.name,
          stop_sequence: stop_sequence
        }
      end)
    end)
    |> Enum.map(&Task.await(&1, @long_timeout))
  end
end
|
apps/schedules/lib/repo_condensed.ex
| 0.730866
| 0.416589
|
repo_condensed.ex
|
starcoder
|
defmodule Clova.SkillPlug do
  @behaviour Plug

  @moduledoc """
  This plug provides the necessary middlewear to handle a request from the Clova server, call your
  `Clova` implementation, and build the HTTP response.
  This plug is a convenience wrapper of four other plugs: `Plug.Parsers`, `Clova.ValidatorPlug`,
  `Clova.DispatcherPlug`, and `Clova.EncoderPlug`. For most skills, this plug should be sufficient, but
  for more complex skills it may be necessary to use the underlying plugs directly.
  Usage:
  ```
  plug Clova.SkillPlug,
    dispatch_to: MyExtension,
    app_id: "com.example.my_extension",
    json_module: Poison
    # (for development - see Clova.ValidatorPlug) force_signature_valid: OR public_key:
  ```
  This is equivalent to:
  ```
  plug Plug.Parsers,
    parsers: [:json],
    json_decoder: Poison,
    body_reader: Clova.CachingBodyReader.spec()
  plug Clova.ValidatorPlug, app_id: "com.example.my_extension"
  plug Clova.DispatcherPlug, dispatch_to: MyExtension
  plug Clova.EncoderPlug, json_encoder: Poison
  ```
  ## Options
  The options are handled by the underlying wrapped plugs. The minimal recommended options are listed here.
  * `:dispatch_to` - Required. The name of your module that implements the `Clova` behaviour to handle clova requests.
  * `:app_id` - Optional. The application ID as specified in the Clova Developer Center. All requests must contain this ID in the request body. If this option is not provided, the app ID validity is not checked.
  * `:json_module` - The name of the module that will be used to decode and encode the JSON. Can also be in MFA format. Optional if `:json_decoder` and `:json_encoder` are provided.
  * `:json_decoder` - The name of the module that will be used to decode the JSON. Can also be in MFA format. Optional if `:json_module` is provided.
  * `:json_encoder` - The name of the module that will be used to encode the JSON. Can also be in MFA format. Optional if `:json_module` is provided.
  """

  # Pre-computes the init results of all four wrapped plugs as one tuple.
  @impl true
  def init(opts) do
    {
      init_parser(opts),
      init_encoder(opts),
      Clova.ValidatorPlug.init(opts),
      Clova.DispatcherPlug.init(opts)
    }
  end

  # Runs the wrapped plugs in order, short-circuiting as soon as one of them
  # halts the connection (the halted conn is returned unchanged).
  @impl true
  def call(conn, {opts_parser, opts_encoder, opts_validator, opts_dispatcher}) do
    pipeline = [
      {Plug.Parsers, opts_parser},
      {Clova.ValidatorPlug, opts_validator},
      {Clova.DispatcherPlug, opts_dispatcher},
      {Clova.EncoderPlug, opts_encoder}
    ]

    Enum.reduce_while(pipeline, conn, fn {plug_module, plug_opts}, acc ->
      case plug_module.call(acc, plug_opts) do
        %{halted: true} = halted_conn -> {:halt, halted_conn}
        next_conn -> {:cont, next_conn}
      end
    end)
  end

  # The JSON decoder falls back to :json_module when :json_decoder is absent.
  defp init_parser(opts) do
    decoder = Keyword.get(opts, :json_decoder) || Keyword.get(opts, :json_module)

    Plug.Parsers.init(
      parsers: [:json],
      json_decoder: decoder,
      body_reader: Clova.CachingBodyReader.spec()
    )
  end

  # The JSON encoder falls back to :json_module when :json_encoder is absent.
  defp init_encoder(opts) do
    encoder = Keyword.get(opts, :json_encoder) || Keyword.get(opts, :json_module)
    Clova.EncoderPlug.init(json_encoder: encoder)
  end
end
|
lib/clova/skill_plug.ex
| 0.864639
| 0.829492
|
skill_plug.ex
|
starcoder
|
defmodule Quadquizaminos.Hints do
def tldr(:intro) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Fill all blocks in a row to clear the row.</p>
<p>Right/left arrows move quads right/left.</p>
<p>Up arrow rotates falling quad.</p>
<p>Down arrow drops falling quad.</p>
<p>Space bar pauses game and pops up quiz.</p>
"""
end
def tldr(:mobile) do
"""
<p>Click "How to Play" for instructions.</p>
<p></p>
<p>Playing without a keyboard is like
ignoring cybersecurity of your supply chain.
It quickly leads to bankruptcy!</p>
<p></p>
"""
end
def tldr(:quiz) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Space bar pops up quiz.</p>
<p>Answer questions for points and powerups.</p>
<p>Powerups let you delete blocks, add blocks, move blocks,
prevent attacks, prevent lawsuits, etc</p>
"""
end
def tldr(:vuln) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Don't let your supply chain get too long;
if it reaches the top of the game board,
you go bankrupt.</p>
<p>Over time, your technical debt increases.
At some point, vulnerabilities and/or licensing
issues crop up in your supply chain</p>
<p>Vulnerabilites are yellow/grey blocks.
Licenseing issues are brown/grey blocks.
Both prevent rows from being cleared</p>
"""
end
def tldr(:scoring) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Each tick of the game clock gets points</p>
<p>Clearing rows gets points. Clearing multiple Rows
at once gets expontially increasing number of points</p>
<p>Answering questions correctly gets points.
Later questions are worth more than the initial questions</p>
<p></p>
"""
end
def tldr(:scoring2) do
"""
<p>Click "How to Play" for instructions.</p>
<p>You lose points if you answer incorrectly
(much fewer than you get for answering correctly
so ok to guess).</p>
<p>Attacks and lawsuits cause you to quickly lose points
with each tick of the clock
(hit spacebar to pause)</p>
"""
end
def tldr(:rm_vuln) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Too many vulnerabilities results in a cyberattack.
Too many licensing issues results in a lawsuit</p>
<p>Certain powerups help with vulnerabilities,
licensing issues, attacks, and lawsuits</p>
<p></p>
<p></p>
"""
end
def tldr(:clrblocks) do
"""
<p>Click "How to Play" for instructions.</p>
<p>ClearBlocks or the "eraser" powerup <i class="fas fa-eraser"></i>
is a valuable powerup which clears the board of all
blocks including vulnerabilities,
licensing issues, attacks, and lawsuits</p>
<p>The 9 Erasers are rewards for answering questions
in the Phoenix category (ie you rise from the ashes)</p>
"""
end
def tldr(:speed) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Certain powerups help with slowing up or speeding up</p>
<p>Faster speeds mean more points - not just becasue ticks
come more quickly, but also the number of points per click increases</p>
<p>Faster speeds also increase the multiplier on clearing rows</p>
<p></p>
"""
end
def tldr(:addblock) do
"""
<p>Click "How to Play" for instructions.</p>
<p><i class="fas fa-plus-square"></i> is the addblock powerup</p>
<p>It allows you to place a block in a free square</p>
<p>It is a reward for certain quiz questions in Supply Chain Category</p>
"""
end
def tldr(:delblock) do
"""
<p>Click "How to Play" for instructions.</p>
<p>need delete block hints</p>
<p></p>
<p></p>
"""
end
def tldr(:mvblock) do
"""
<p>Click "How to Play" for instructions.</p>
<p>need moveblock hints</p>
<p></p>
<p></p>
"""
end
def tldr(:speedup) do
"""
<p>Click "How to Play" for instructions.</p>
<p>The speedup powerup (<i class="fas fa-fast-forward"></i>)
speeds upthe pace of the game (e.g. from lethargic to sedate)</p>
<p>It is most useful when hit by a lawsuit,
since lawsuits slow down the game up to almost halted.</p>
<p>Always keep a few in reserve.</p>
<p>Speedup powerups can be found - which category(ies)?</p>
"""
end
def tldr(:slowdown) do
"""
<p>Click "How to Play" for instructions.</p>
<p>The slowdown powerup (<i class="fas fa-fast-backward"></i>)
slows down the pace of the game (e.g. from fast to moderate)</p>
<p>It is most useful when hit by a cyberattack,
since cyberattacks speed the game up uncontrollably.</p>
<p>Always keep a few in reserve.</p>
<p>Slowdown powerups can be found - which category(ies)?</p>
"""
end
# Hint text for the :superpower powerup topic. Returns an HTML fragment string.
def tldr(:superpower) do
"""
<p>Click "How to Play" for instructions.</p>
<p>Make sure to answer questions in the vendor category.</p>
<p>Vendors is the only place you can get 'superpower' powerup.</p>
<p>Superpower (<i class="fab fa-superpowers"></i> ) can be traded
in for other powers of your choice.</p>
<p>Superpower is needed to get the powerups that stop cyberattacks or lawsuits</p>
"""
end
# Catch-all clause for unknown hint topics.
def tldr(_hint) do
"Oops!"
end
# Returns the hint topic to show after `previous_hint`, cycling through
# every topic in a fixed order. Unknown topics restart the tour at :intro.
def next_hint(previous_hint) do
  %{
    intro: :mobile,
    mobile: :quiz,
    quiz: :scoring,
    scoring: :scoring2,
    scoring2: :vuln,
    vuln: :rm_vuln,
    # Fix: key was `rm:`, which nothing maps to, while :rm_vuln (reached
    # from :vuln above) fell through to the :intro default and broke the cycle.
    rm_vuln: :clrblocks,
    clrblocks: :addblock,
    addblock: :speed,
    speed: :delblock,
    delblock: :mvblock,
    mvblock: :speedup,
    speedup: :slowdown,
    slowdown: :superpower,
    superpower: :intro
  }
  |> Map.get(previous_hint, :intro)
end
end
|
lib/quadquizaminos/hints.ex
| 0.52683
| 0.709397
|
hints.ex
|
starcoder
|
defmodule HL7.Examples do
@moduledoc """
Functions to provide sample HL7 data which can be used to explore the API.
"""
@doc """
Returns a sample HL7 string from [Wikipedia's HL7 article](https://en.wikipedia.org/wiki/Health_Level_7#Version_2_messaging).
The HL7 version of the message defaults to 2.5, but can be overridden.
"""
@spec wikipedia_sample_hl7(String.t()) :: String.t()
def wikipedia_sample_hl7(version \\ "2.5")
def wikipedia_sample_hl7("2.5") do
"""
MSH|^~\\&|MegaReg|XYZHospC|SuperOE|XYZImgCtr|20060529090131-0500||ADT^A01^ADT_A01|01052901|P|2.5
EVN||200605290901||||200605290900
PID|||56782445^^^UAReg^PI||KLEINSAMPLE^BARRY^Q^JR||19620910|M||2028-9^^HL70005^RA99113^^XYZ|260 GOODWIN CREST DRIVE^^BIRMINGHAM^AL^35209^^M~NICKELL’S PICKLES^10000 W 100TH AVE^BIRMINGHAM^AL^35200^^O|||||||0105I30001^^^99DEF^AN
PV1||I|W^389^1^UABH^^^^3||||12345^MORGAN^REX^J^^^MD^0010^UAMC^L||67890^GRAINGER^LUCY^X^^^MD^0010^UAMC^L|MED|||||A0||13579^POTTER^SHERMAN^T^^^MD^0010^UAMC^L|||||||||||||||||||||||||||200605290900
OBX|1|N^K&M|^Body Height||1.80|m^Meter^ISO+|||||F
OBX|2|NM|^Body Weight||79|kg^Kilogram^ISO+|||||F
AL1|1||^ASPIRIN
DG1|1||786.50^CHEST PAIN, UNSPECIFIED^I9|||A
"""
|> String.replace("\n", "\r")
end
def wikipedia_sample_hl7(version) when is_binary(version) do
wikipedia_sample_hl7()
|> String.replace("2.5", version, global: false)
end
@spec nist_immunization_hl7() :: String.t()
def nist_immunization_hl7() do
"""
MSH|^~\\&|Test EHR Application|X68||NIST Test Iz Reg|201207010822||VXU^V04^VXU_V04|NIST-IZ-020.00|P|2.5.1|||AL|ER
PID|1||252430^^^MAA^MR||Curry^Qiang^Trystan^^^^L||20090819|M
ORC|RE||IZ-783278^NDA|||||||||57422^RADON^NICHOLAS^^^^^^NDA^L
RXA|0|1|20120814||140^Influenza^CVX|0.5|mL^MilliLiter [SI Volume Units]^UCUM||00^New immunization record^NIP001||||||W1356FE|20121214|SKB^GlaxoSmithKline^MVX|||CP|A
RXR|C28161^Intramuscular^NCIT|RA^Right Arm^HL70163
OBX|1|CE|64994-7^Vaccine funding program eligibility category^LN|1|V03^VFC eligible - Uninsured^HL70064||||||F|||20120701|||VXC40^Eligibility captured at the immunization level^CDCPHINVS
OBX|2|CE|30956-7^vaccine type^LN|2|88^Influenza, unspecified formulation^CVX||||||F
OBX|3|TS|29768-9^Date vaccine information statement published^LN|2|20120702||||||F
OBX|4|TS|29769-7^Date vaccine information statement presented^LN|2|20120814||||||F
ORC|RE||IZ-783276^NDA
RXA|0|1|20110214||133^PCV 13^CVX|999|||01^Historical information - source unspecified^NIP001
ORC|RE||IZ-783282^NDA|||||||||57422^RADON^NICHOLAS^^^^^^NDA^L
RXA|0|1|20120814||110^DTaP-Hep B-IPV^CVX|0.5|mL^MilliLiter [SI Volume Units]^UCUM||00^New immunization record^NIP001||||||78HH34I|20121214|SKB^GlaxoSmithKline^MVX|||CP|A
RXR|C28161^Intramuscular^NCIT|LA^Left Arm^HL70163
OBX|1|CE|64994-7^Vaccine funding program eligibility category^LN|1|V03^VFC eligible - Uninsured^HL70064||||||F|||20120701|||VXC40^Eligibility captured at the immunization level^CDCPHINVS
OBX|2|CE|30956-7^vaccine type^LN|2|107^DTaP^CVX||||||F
OBX|3|TS|29768-9^Date vaccine information statement published^LN|2|20070517||||||F
OBX|4|TS|29769-7^Date vaccine information statement presented^LN|2|20120814||||||F
OBX|5|CE|30956-7^vaccine type^LN|3|89^Polio^CVX||||||F
OBX|6|TS|29768-9^Date vaccine information statement published^LN|3|20111108||||||F
OBX|7|TS|29769-7^Date vaccine information statement presented^LN|3|20120814||||||F
OBX|8|CE|30956-7^vaccine type^LN|4|45^Hep B, unspecified formulation^CVX||||||F
OBX|9|TS|29768-9^Date vaccine information statement published^LN|4|20120202||||||F
OBX|10|TS|29769-7^Date vaccine information statement presented^LN|4|20120814||||||F
"""
|> String.replace("\n", "\r")
end
@spec nist_syndromic_hl7() :: String.t()
def nist_syndromic_hl7() do
"""
MSH|^~\\&||LakeMichMC^9879874000^NPI|||201204020040||ADT^A03^ADT_A03|NIST-SS-003.32|P|2.5.1|||||||||PH_SS-NoAck^SS Sender^2.16.840.1.114222.4.10.3^ISO
EVN||201204020030|||||LakeMichMC^9879874000^NPI
PID|1||33333^^^^MR||^^^^^^~^^^^^^S|||F||2106-3^^CDCREC|^^^^53217^^^^55089|||||||||||2186-5^^CDCREC
PV1|1||||||||||||||||||33333_001^^^^VN|||||||||||||||||09||||||||201204012130
DG1|1||0074^Cryptosporidiosis^I9CDX|||F
DG1|2||27651^Dehydration^I9CDX|||F
DG1|3||78791^Diarrhea^I9CDX|||F
OBX|1|CWE|SS003^^PHINQUESTION||261QE0002X^Emergency Care^NUCC||||||F
OBX|2|NM|21612-7^^LN||45|a^^UCUM|||||F
OBX|3|CWE|8661-1^^LN||^^^^^^^^Diarrhea, stomach pain, dehydration||||||F
"""
|> String.replace("\n", "\r")
end
end
|
lib/hl7/examples.ex
| 0.751785
| 0.668001
|
examples.ex
|
starcoder
|
defmodule OddJob.Job do
@moduledoc """
The `OddJob.Job` struct holds all of the useful information about a job.
"""
@moduledoc since: "0.1.0"
@spec __struct__ :: OddJob.Job.t()
defstruct [:ref, :owner, :function, :results, :proxy, async: false]
@typedoc """
The `OddJob.Job` struct is the datatype that is passed between processes charged with performing
the job.
It holds all of the data that is necessary to link, monitor, perform work, and return results
to the caller.
The job struct is only returned to the caller when using the async/await pattern. When the caller receives
the struct after calling `OddJob.async_perform/2` the `:results` field is always `nil`, even though the
work could conceivably already be done. This is because the results are not waited on at the time the
struct is created. The results are only known when passing the job to `OddJob.await/2` or matching on the
`{ref, results}` message.
* `:function` is the anonymous function that will be performed by the worker
* `:results` is the term that is returned by `function`. This is only used internally by the
processes performing the work.
* `:async` is a boolean identifying if the job's results can be awaited on
* `:ref` is the unique monitor reference of the job
* `:owner` is the pid of the calling process, i.e. `self()`
* `:proxy` is the `pid` of the proxy server that creates the job and routes the results. The `owner`
links and monitors the `proxy`, while the `proxy` links and monitors the worker. Exit messages and failures
cascade up to the `owner`. The worker sends results back to the `proxy`, which then sends them to the
`owner` before exiting with reason `:normal`.
"""
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
async: boolean,
ref: reference | nil,
owner: pid | nil,
proxy: pid | nil,
function: function | nil,
results: term | nil
}
end
|
lib/odd_job/job.ex
| 0.841923
| 0.60826
|
job.ex
|
starcoder
|
defmodule Chopsticks.Learn.Generator do
  @moduledoc """
  GenServer for generating and remembering moves.

  Picks random moves via `Chopsticks.Random` and accumulates a per-player
  history of move records so the winner's moves can be retrieved for
  learning once the game is decided.
  """
  use GenServer

  alias Chopsticks.Engine
  alias Chopsticks.Random

  # Client

  @doc "Starts the generator with an empty move history for players 1 and 2."
  def start_link do
    GenServer.start_link(__MODULE__, %{1 => [], 2 => []})
  end

  @doc """
  Chooses a random move for `player_number` against their opponent,
  records it asynchronously, and returns the `{type, data}` move tuple.
  """
  def take_turn(pid, player_number, players) do
    player = players[player_number]
    next_number = Engine.next_player_number(player_number)
    opponent = players[next_number]

    {type, data} = move = Random.random_move(player, opponent)

    case type do
      :split ->
        GenServer.cast(pid, {:turn, %{type: :split,
                                      player_number: player_number,
                                      player: player,
                                      opponent: opponent}})

      :touch ->
        {player_direction, opponent_direction} = data

        GenServer.cast(pid, {:turn, %{type: :touch,
                                      player_direction: player_direction,
                                      opponent_direction: opponent_direction,
                                      player_number: player_number,
                                      player: player,
                                      opponent: opponent}})
    end

    move
  end

  @doc "Returns the winning player's recorded move history and stops the server."
  def won(pid, player_number) do
    win = GenServer.call(pid, {:win, player_number})
    GenServer.stop(pid)
    win
  end

  @doc "Stops the server, discarding the game's history (a tie teaches nothing)."
  def tied(pid) do
    GenServer.stop(pid)
  end

  # Server

  # Fix: `use GenServer` no longer injects a usable default init/1, so an
  # explicit callback is required for start_link/0 to work without warnings.
  @impl true
  def init(state), do: {:ok, state}

  @impl true
  def handle_cast({:turn, %{type: type,
                            player_number: player_number,
                            player: player,
                            opponent: opponent} = data}, state) do
    # Only :touch moves carry from/to hand values; a :split has no move data.
    move =
      case type do
        :touch ->
          %{
            from: player[data.player_direction],
            to: opponent[data.opponent_direction]
          }

        :split ->
          nil
      end

    record = %{
      type: type,
      player: normalize_player(player),
      opponent: normalize_player(opponent),
      move: move
    }

    # Prepend the record to this player's history (newest first).
    state = Map.update!(state, player_number, fn history ->
      [record | history]
    end)

    {:noreply, state}
  end

  @impl true
  def handle_call({:win, player_number}, _from, records) do
    # State is dropped; the caller stops the server right after this reply.
    {:reply, records[player_number], nil}
  end

  # Normalizes a player map to a tuple of the two hands, in value order.
  defp normalize_player(player) do
    player
    |> Map.values
    |> Enum.sort
    |> List.to_tuple
  end
end
defmodule Chopsticks.Learn do
@moduledoc """
Functions for learning how to play Chopsticks.
"""
alias Chopsticks.Engine
alias Chopsticks.Learn.Generator
@doc """
Learn from playing some random trials.

Plays `games` random games (default 100) and aggregates the winners'
recorded moves into a frequency table of the form
`%{{player, opponent} => %{move => count}}`.
"""
def learn, do: learn(100, [])
def learn(games), do: learn(games, [])
# Base case: all games have been played. Folds the winners' records into
# a frequency table keyed by game state.
def learn(0, wins) do
wins
|> List.flatten
|> Enum.reduce(%{}, fn
%{
type: type,
player: player,
opponent: opponent,
move: move_data
}, acc ->
# Use these as keys for later lookup.
game_state = {player, opponent}
move = {type, move_data}
# Each game state should have an array of moves, with a count of each move.
Map.update(acc, game_state, %{move => 1}, fn moves ->
Map.update(moves, move, 1, &(&1 + 1))
end)
end)
# NOTE(review): IO.inspect prints the table and returns it unchanged —
# appears intentional (debug output of the learned table).
|> IO.inspect
end
# Recursive case: play one 20-turn game with random moves and collect the
# winner's move history.
def learn(i, wins) do
{:ok, pid} = Generator.start_link
winner = Engine.play(
20,
get_move: fn player_number, players ->
Generator.take_turn(pid, player_number, players)
end,
display_error: fn code ->
raise code
end
)
case winner do
0 ->
Generator.tied(pid)
# Ignore this iteration
# NOTE(review): ties retry without decrementing `i`; if ties dominate,
# this can recurse for a long time.
learn(i, wins)
winner ->
win = Generator.won(pid, winner)
learn(i - 1, [win | wins])
end
end
end
|
lib/chopsticks/learn.ex
| 0.792745
| 0.584153
|
learn.ex
|
starcoder
|
defmodule Parseus do
@moduledoc """
Legendary Elixir parser to tame all your input enumerables for good.
## Usage
Here's a basic usage:
input = %{
"name" => "Mike",
"email" => "<EMAIL>",
"age" => "21",
"license-agreement" => "1",
"notes" => "Please don't send me e-mails!",
}
import Parseus
%{output: output, errors: []} =
input
|> cast(["name", "email", "license-agreement", "age", "notes"])
|> validate_required([:name, :email, :license_agreement])
|> validate_format(:email, ~r/^.*@example\\.com$/)
|> parse_boolean(:license_agreement)
|> validate_equal(:license_agreement, true)
|> drop_key(:license_agreement)
|> parse_integer(:age)
IO.inspect(output)
# [name: "Mike", email: "<EMAIL>", age: 21, notes: "..."]
## Details
### Parsing key(s)
Key parsers get invoked via all `parse_*` built-in parsing functions which ultimately call generic
`parse/4` (which can also be invoked with user-defined parsers).
Here's how they work:
- if there's no specific key in the input, the parser will not execute
- if there's already an error associated with the key, the parser will not execute
- otherwise the parser gets called with the current value of the key
- if parser succeeds, the output value associated with the key gets updated
- if parser fails, the key gets removed from the output and appropriate error gets added
This basically means that if you pipe multiple parsers on the same key, they'll all get executed
in a sequence with the output from previous parser getting passed to the next one, until the first
parser failure, in which case subsequent parsers will not be called at all. In case of failure,
the input value is no longer considered usable as an output and gets removed from it.
### Validating key(s)
Key validators get invoked via all `validate_*` built-in validation functions which ultimately
call generic `validate/4` (which can also be invoked with user-defined validators).
Here's how they work:
- if there's no specific key in the input, the parser will not execute
- otherwise the validator gets called with the current value of the key
- if validator succeeds, nothing happens
- if validator fails, an appropriate error gets added
Key validators are a bit similar to key parsers, but they don't change the output (because they're
not meant for that) and they still get called if there's already an error associated with the key
(because we want to have as many errors as possible).
> Note that there's still a way to avoid calling the specific validator upon some previous failed
> assertion - this is where the parser's property of removing failed keys comes to use. You can
> just call the parser before the validator and if parser fails, the validator won't get called.
### Validating multiple keys
Global validators get invoked via the generic `validate_all/3` which can be invoked with
user-defined validators.
Here's how they work:
- the validator gets called with the set of current values
- if validator succeeds, nothing happens
- if validator fails, an appropriate error or set of errors gets added
As opposed to key validators, global validators get the whole set of current values as its input
instead of a value of a single key. This allows them to implement a cross-key logical validation.
They're also called regardless of which keys are filled in the input.
"""
alias __MODULE__.{
BlankStringToNilMapper,
BooleanParser,
DateParser,
EnumParser,
FloatParser,
IntegerParser,
ListParser,
AddErrorProcessor,
CastAllInProcessor,
CastInProcessor,
CastProcessor,
DropInvalidProcessor,
DropNilProcessor,
DropProcessor,
FilterProcessor,
ForkProcessor,
JoinProcessor,
MapProcessor,
ParseProcessor,
RenameProcessor,
ValidateAllProcessor,
ValidateProcessor,
FlattenErrorsUtil,
GetInputPathUtil,
ResolveUtil,
ResolveTupleUtil,
AcceptanceValidator,
BooleanValidator,
ExclusionValidator,
FormatValidator,
InclusionValidator,
LengthValidator,
NumberValidator,
RequiredValidator,
TypeValidator,
}
# All public functions below are thin delegations to the corresponding
# Processor/Parser/Validator/Util module's `call` function.
def add_error(set, output_key_or_path, error) do
AddErrorProcessor.call(set, output_key_or_path, error)
end
def cast(input, input_key_or_keys) do
CastProcessor.call(input, input_key_or_keys)
end
def cast_all_in(input, input_key_or_path, output_key, mod_or_func) do
CastAllInProcessor.call(input, input_key_or_path, output_key, mod_or_func)
end
def cast_in(input, input_key_or_path, output_key \\ nil, mod_or_func) do
CastInProcessor.call(input, input_key_or_path, output_key, mod_or_func)
end
def drop(set, key_or_keys) do
DropProcessor.call(set, key_or_keys)
end
def drop_invalid(set, key_or_keys \\ nil) do
DropInvalidProcessor.call(set, key_or_keys)
end
def drop_nil(set, key_or_keys \\ nil) do
DropNilProcessor.call(set, key_or_keys)
end
def filter(set, key_or_keys, mod_or_func) do
FilterProcessor.call(set, key_or_keys, mod_or_func)
end
def flatten_errors(set) do
FlattenErrorsUtil.call(set)
end
def fork(set, source_key, target_key) do
ForkProcessor.call(set, source_key, target_key)
end
def join(set, old_keys, new_key, opts \\ []) do
JoinProcessor.call(set, old_keys, new_key, opts)
end
def get_input_path(set, output_key_or_path) do
GetInputPathUtil.call(set, output_key_or_path)
end
def map(set, key_or_keys, mod_or_func) do
MapProcessor.call(set, key_or_keys, mod_or_func)
end
def map_blank_string_to_nil(set, key_or_keys) do
map(set, key_or_keys, BlankStringToNilMapper)
end
# Parsers: transform a key's value; on failure the key is removed from the
# output and an error is recorded (see moduledoc).
def parse(set, key_or_keys, mod_or_func, opts \\ []) do
ParseProcessor.call(set, key_or_keys, mod_or_func, opts)
end
def parse_boolean(set, key_or_keys) do
parse(set, key_or_keys, BooleanParser)
end
def parse_date(set, key_or_keys) do
parse(set, key_or_keys, DateParser)
end
def parse_enum(set, key_or_keys, allowed_values) do
parse(set, key_or_keys, EnumParser, allowed_values)
end
def parse_float(set, key_or_keys) do
parse(set, key_or_keys, FloatParser)
end
def parse_integer(set, key_or_keys) do
parse(set, key_or_keys, IntegerParser)
end
def parse_list(set, key_or_keys, opts \\ []) do
parse(set, key_or_keys, ListParser, opts)
end
def rename(set, old_key, new_key) do
RenameProcessor.call(set, old_key, new_key)
end
def resolve(set) do
ResolveUtil.call(set)
end
def resolve_tuple(set, key_or_keys) do
ResolveTupleUtil.call(set, key_or_keys)
end
# Validators: check a key's value without changing the output; failures
# only add errors (see moduledoc).
def validate(set, key_or_keys, mod_or_func, opts \\ []) do
ValidateProcessor.call(set, key_or_keys, mod_or_func, opts)
end
def validate_acceptance(set, key_or_keys) do
validate(set, key_or_keys, AcceptanceValidator)
end
def validate_all(set, validator, opts \\ []) do
ValidateAllProcessor.call(set, validator, opts)
end
def validate_boolean(set, key_or_keys) do
validate(set, key_or_keys, BooleanValidator)
end
def validate_exclusion(set, key_or_keys, forbidden_values) do
validate(set, key_or_keys, ExclusionValidator, forbidden_values)
end
def validate_format(set, key_or_keys, format) do
validate(set, key_or_keys, FormatValidator, format)
end
def validate_inclusion(set, key_or_keys, allowed_values) do
validate(set, key_or_keys, InclusionValidator, allowed_values)
end
def validate_length(set, key_or_keys, opts) do
validate(set, key_or_keys, LengthValidator, opts)
end
def validate_number(set, key_or_keys, opts \\ []) do
validate(set, key_or_keys, NumberValidator, opts)
end
def validate_required(set, key_or_keys) do
validate_all(set, RequiredValidator, key_or_keys)
end
def validate_type(set, key_or_keys, type_or_types) do
validate(set, key_or_keys, TypeValidator, type_or_types)
end
end
|
lib/parseus.ex
| 0.651798
| 0.438124
|
parseus.ex
|
starcoder
|
defmodule TypedStruct do
@moduledoc """
TypedStruct is a library for defining structs with a type without writing
boilerplate code.
## Rationale
To define a struct in Elixir, you probably want to define three things:
* the struct itself, with default values,
* the list of enforced keys,
* its associated type.
It ends up in something like this:
defmodule Person do
@moduledoc \"\"\"
A struct representing a person.
\"\"\"
@enforce_keys [:name]
defstruct name: nil,
age: nil,
happy?: true,
phone: nil
@typedoc "A person"
@type t() :: %__MODULE__{
name: String.t(),
age: non_neg_integer() | nil,
happy?: boolean(),
phone: String.t() | nil
}
end
In the example above you can notice several points:
* the keys are present in both the `defstruct` and type definition,
* enforced keys must also be written in `@enforce_keys`,
* if a key has no default value and is not enforced, its type should be
nullable.
If you want to add a field in the struct, you must therefore:
* add the key with its default value in the `defstruct` list,
* add the key with its type in the type definition.
If the field is not optional, you should even add it to `@enforce_keys`. This
is way too much work for lazy people like me, and moreover it can be
error-prone.
It would be way better if we could write something like this:
defmodule Person do
@moduledoc \"\"\"
A struct representing a person.
\"\"\"
use TypedStruct
typedstruct do
@typedoc "A person"
field :name, String.t(), enforce: true
field :age, non_neg_integer()
field :happy?, boolean(), default: true
field :phone, String.t()
end
end
Thanks to TypedStruct, this is now possible :)
## Usage
### Setup
To use TypedStruct in your project, add this to your Mix dependencies:
{:typed_struct, "~> #{Mix.Project.config()[:version]}"}
If you do not plan to compile modules using TypedStruct at runtime, you can
add `runtime: false` to the dependency tuple as TypedStruct is only used at
build time.
If you want to avoid `mix format` putting parentheses on field definitions,
you can add to your `.formatter.exs`:
[
...,
import_deps: [:typed_struct]
]
### General usage
To define a typed struct, use `TypedStruct`, then define your struct within a
`typedstruct` block:
defmodule MyStruct do
# Use TypedStruct to import the typedstruct macro.
use TypedStruct
# Define your struct.
typedstruct do
# Define each field with the field macro.
field :a_string, String.t()
# You can set a default value.
field :string_with_default, String.t(), default: "default"
# You can enforce a field.
field :enforced_field, integer(), enforce: true
end
end
Each field is defined through the `field/2` macro.
### Options
If you want to enforce all the keys by default, you can do:
defmodule MyStruct do
use TypedStruct
# Enforce keys by default.
typedstruct enforce: true do
# This key is enforced.
field :enforced_by_default, term()
# You can override the default behaviour.
field :not_enforced, term(), enforce: false
# A key with a default value is not enforced.
field :not_enforced_either, integer(), default: 1
end
end
You can also generate an opaque type for the struct:
defmodule MyOpaqueStruct do
use TypedStruct
# Generate an opaque type for the struct.
typedstruct opaque: true do
field :name, String.t()
end
end
If you often define submodules containing only a struct, you can avoid
boilerplate code:
defmodule MyModule do
use TypedStruct
# You now have %MyModule.Struct{}.
typedstruct module: Struct do
field :field, term()
end
end
### Documentation
To add a `@typedoc` to the struct type, just add the attribute in the
`typedstruct` block:
typedstruct do
@typedoc "A typed struct"
field :a_string, String.t()
field :an_int, integer()
end
You can also document submodules this way:
typedstruct module: MyStruct do
@moduledoc "A submodule with a typed struct."
@typedoc "A typed struct in a submodule"
field :a_string, String.t()
field :an_int, integer()
end
### Plugins
It is possible to extend the scope of TypedStruct by using its plugin
interface, as described in `TypedStruct.Plugin`. For instance, to
automatically generate lenses with the [Lens](https://github.com/obrok/lens)
library, you can use
[`TypedStructLens`](https://github.com/ejpcmac/typed_struct_lens) and do:
defmodule MyStruct do
use TypedStruct
typedstruct do
plugin TypedStructLens
field :a_field, String.t()
field :other_field, atom()
end
@spec change(t()) :: t()
def change(data) do
# a_field/0 is generated by TypedStructLens.
lens = a_field()
put_in(data, [lens], "Changed")
end
end
## What do I get?
When defining an empty `typedstruct` block:
defmodule Example do
use TypedStruct
typedstruct do
end
end
you get an empty struct with its module type `t()`:
defmodule Example do
@enforce_keys []
defstruct []
@type t() :: %__MODULE__{}
end
Each `field` call adds information to the struct, `@enforce_keys` and the type
`t()`.
A field with no options adds the name to the `defstruct` list, with `nil` as
default. The type itself is made nullable:
defmodule Example do
use TypedStruct
typedstruct do
field :name, String.t()
end
end
becomes:
defmodule Example do
@enforce_keys []
defstruct name: nil
@type t() :: %__MODULE__{
name: String.t() | nil
}
end
The `default` option adds the default value to the `defstruct`:
field :name, String.t(), default: "<NAME>"
# Becomes
defstruct name: "<NAME>"
When set to `true`, the `enforce` option enforces the key by adding it to the
`@enforce_keys` attribute.
field :name, String.t(), enforce: true
# Becomes
@enforce_keys [:name]
defstruct name: nil
In both cases, the type has no reason to be nullable anymore by default. In
one case the field is filled with its default value and not `nil`, and in the
other case it is enforced. Both options would generate the following type:
@type t() :: %__MODULE__{
name: String.t() # Not nullable
}
Passing `opaque: true` replaces `@type` with `@opaque` in the struct type
specification:
typedstruct opaque: true do
field :name, String.t()
end
generates the following type:
@opaque t() :: %__MODULE__{
name: String.t()
}
When passing `module: ModuleName`, the whole `typedstruct` block is wrapped in
a module definition. This way, the following definition:
defmodule MyModule do
use TypedStruct
typedstruct module: Struct do
field :field, term()
end
end
becomes:
defmodule MyModule do
defmodule Struct do
@enforce_keys []
defstruct field: nil
@type t() :: %__MODULE__{
field: term() | nil
}
end
end
"""
@doc false
defmacro __using__(_) do
quote do
import TypedStruct, only: [typedstruct: 1, typedstruct: 2]
end
end
@doc """
Defines a typed struct.
Inside a `typedstruct` block, each field is defined through the `field/2`
macro.
## Options
* `enforce` - if set to true, sets `enforce: true` to all fields by default.
This can be overridden by setting `enforce: false` or a default value on
individual fields.
* `opaque` - if set to true, creates an opaque type for the struct.
* `module` - if set, creates the struct in a submodule named `module`.
## Examples
defmodule MyStruct do
use TypedStruct
typedstruct do
field :field_one, String.t()
field :field_two, integer(), enforce: true
field :field_three, boolean(), enforce: true
field :field_four, atom(), default: :hey
end
end
The following is an equivalent using the *enforce by default* behaviour:
defmodule MyStruct do
use TypedStruct
typedstruct enforce: true do
field :field_one, String.t(), enforce: false
field :field_two, integer()
field :field_three, boolean()
field :field_four, atom(), default: :hey
end
end
You can create the struct in a submodule instead:
defmodule MyModule do
use TypedStruct
typedstruct module: Struct do
field :field_one, String.t()
field :field_two, integer(), enforce: true
field :field_three, boolean(), enforce: true
field :field_four, atom(), default: :hey
end
end
"""
defmacro typedstruct(opts \\ [], do: block) do
if is_nil(opts[:module]) do
quote do
Module.eval_quoted(
__ENV__,
TypedStruct.__typedstruct__(
unquote(Macro.escape(block)),
unquote(opts)
)
)
end
else
quote do
defmodule unquote(opts[:module]) do
Module.eval_quoted(
__ENV__,
TypedStruct.__typedstruct__(
unquote(Macro.escape(block)),
unquote(opts)
)
)
end
end
end
end
@doc false
def __typedstruct__(block, opts) do
quote do
Module.register_attribute(__MODULE__, :ts_plugins, accumulate: true)
Module.register_attribute(__MODULE__, :ts_fields, accumulate: true)
Module.register_attribute(__MODULE__, :ts_types, accumulate: true)
Module.register_attribute(__MODULE__, :ts_enforce_keys, accumulate: true)
Module.put_attribute(__MODULE__, :ts_enforce?, unquote(!!opts[:enforce]))
Module.put_attribute(__MODULE__, :ts_type?, unquote(!opts[:no_type]))
# Create a scope to avoid leaks.
(fn ->
import TypedStruct
unquote(block)
end).()
@enforce_keys @ts_enforce_keys
defstruct @ts_fields
if unquote(!opts[:no_type]) do
TypedStruct.__type__(@ts_types, unquote(opts))
end
Enum.each(@ts_plugins, fn {plugin, plugin_opts} ->
if {:after_definition, 1} in plugin.__info__(:functions) do
Module.eval_quoted(__MODULE__, plugin.after_definition(plugin_opts))
end
end)
Module.delete_attribute(__MODULE__, :ts_type?)
Module.delete_attribute(__MODULE__, :ts_enforce?)
Module.delete_attribute(__MODULE__, :ts_enforce_keys)
Module.delete_attribute(__MODULE__, :ts_types)
Module.delete_attribute(__MODULE__, :ts_fields)
Module.delete_attribute(__MODULE__, :ts_plugins)
end
end
@doc false
defmacro __type__(types, opts) do
if Keyword.get(opts, :opaque, false) do
quote bind_quoted: [types: types] do
@opaque t() :: %__MODULE__{unquote_splicing(types)}
end
else
quote bind_quoted: [types: types] do
@type t() :: %__MODULE__{unquote_splicing(types)}
end
end
end
@doc """
Registers a plugin for the currently defined struct.
## Example
typedstruct do
plugin MyPlugin
field :a_field, String.t()
end
For more information on how to define your own plugins, please see
`TypedStruct.Plugin`. To use a third-party plugin, please refer directly to
its documentation.
"""
defmacro plugin(plugin, opts \\ []) do
quote do
Module.put_attribute(
__MODULE__,
:ts_plugins,
{unquote(plugin), unquote(opts)}
)
require unquote(plugin)
unquote(plugin).init(unquote(opts))
end
end
@no_type nil
@doc """
Defines a field in a typed struct.
## Example
# A field named :example of type String.t()
field :example, String.t()
# Or, if no_type: true is set
field :example
## Options
* `default` - sets the default value for the field
* `enforce` - if set to true, enforces the field and makes its type
non-nullable
"""
defmacro field(name), do: field_h(name, @no_type, [])
defmacro field(name, [{_,_} | _] = opts), do: field_h(name, @no_type, opts)
defmacro field(name, type), do: field_h(name, type, [])
defmacro field(name, type, opts), do: field_h(name, type, opts)
defp field_h(name, type, opts) do
quote do
TypedStruct.__field__(
__MODULE__,
unquote(name),
unquote(Macro.escape(type)),
unquote(opts)
)
Enum.each(@ts_plugins, fn {plugin, plugin_opts} ->
if {:field, 3} in plugin.__info__(:functions) do
Module.eval_quoted(
__MODULE__,
plugin.field(
unquote(name),
unquote(Macro.escape(type)),
unquote(opts) ++ plugin_opts
)
)
end
end)
end
end
@doc false
def __field__(mod, name, type, opts) when is_atom(name) do
if mod |> Module.get_attribute(:ts_fields) |> Keyword.has_key?(name) do
raise ArgumentError, "the field #{inspect(name)} is already set"
end
if Module.get_attribute(mod, :ts_type?) do
if type == @no_type, do: raise ArgumentError,
"the field #{inspect(name)} is missing a type specifier, " <>
"or else you must specify no_type: true"
else
if type != @no_type, do: raise ArgumentError,
"the field #{inspect(name)} has a type specifier, " <>
"but no_type: true has been set"
end
has_default? = Keyword.has_key?(opts, :default)
enforce_by_default? = Module.get_attribute(mod, :ts_enforce?)
enforce? =
if is_nil(opts[:enforce]),
do: enforce_by_default? && !has_default?,
else: !!opts[:enforce]
nullable? = !has_default? && !enforce?
Module.put_attribute(mod, :ts_fields, {name, opts[:default]})
Module.put_attribute(mod, :ts_types, {name, type_for(type, nullable?)})
if enforce?, do: Module.put_attribute(mod, :ts_enforce_keys, name)
end
def __field__(_mod, name, _type, _opts) do
raise ArgumentError, "a field name must be an atom, got #{inspect(name)}"
end
# Makes the type nullable if the key is not enforced.
defp type_for(type, false), do: type
defp type_for(type, _), do: quote(do: unquote(type) | nil)
end
|
lib/typed_struct.ex
| 0.853989
| 0.553445
|
typed_struct.ex
|
starcoder
|
defmodule Riptide.Store.Riptide do
@moduledoc """
This store forwards all mutations and queries to another Riptide instance. This is useful if you have multiple Riptide instances but want all of them to write and read from a primary node. This is comparable to web applications that write to a centralized Postgres database. Used in conjuction with `Riptide.Store.Composite` will allow you to store some data globally and some data locally.
## Configuration
A primary node will only accept connections from nodes configured with the same `token`. In your primary node be sure to set the following configuration with a random token:
```elixir
config :riptide, %{
store: %{
token: "mytoken"
}
}
```
In your child node add the following configuration:
```elixir
config :riptide, %{
store: %{
read: {Riptide.Store.Riptide, []},
write: {Riptide.Store.Riptide, []},
}
}
```
Additionally in the child nodes setup a connection to the primary node in your `application.ex`:
```elixir
children = [
{Riptide.Store.Riptide,
[
url: "https://primary-node:12000/socket",
name: :riptide,
token: "mytoken"
]},
Riptide,
]
```
This will startup a connection to the primary node before starting up Riptide locally. All data now will be written to and read from the primary node.
## Options
- `:name` - name of connection to primary node, defaults to `:riptide` (optional)
"""
@behaviour Riptide.Store
@doc """
Starts a connection to a remote Riptide instance
## Options
- `:name` - name of connection
- `:url` - url of remote node (required)
- `:token` - authorization token (required)
"""
def child_spec(opts) do
Riptide.Store.Riptide.Supervisor.child_spec(opts)
end
@impl true
def init(_opts) do
:ok
end
def opts_name(opts), do: Keyword.get(opts, :name, :riptide)
@impl true
def mutation(merges, deletes, opts) do
mut = %{
merge:
Enum.reduce(merges, %{}, fn {path, value}, collect ->
Dynamic.put(collect, path, value)
end),
delete:
Enum.reduce(deletes, %{}, fn {path, value}, collect ->
Dynamic.put(collect, path, value)
end)
}
{:ok, _} = Riptide.Connection.call(opts_name(opts), "riptide.store.mutation", mut)
:ok
end
@impl true
def query(paths, opts) do
  # Collapse the requested paths into one nested request map.
  request =
    Enum.reduce(paths, %{}, fn {path, value}, acc -> Dynamic.put(acc, path, value) end)

  {:ok, rows} = Riptide.Connection.call(opts_name(opts), "riptide.store.query", request)

  # Each row is [path, values]; re-shape lazily into {path, stream of
  # {subpath, value}} pairs as the store behaviour expects.
  Stream.map(rows, fn [path, values] ->
    {path, Stream.map(values, fn [p, v] -> {p, v} end)}
  end)
end
end
defmodule Riptide.Store.Riptide.Supervisor do
  @moduledoc false
  # Owns the websocket connection to the primary Riptide node and
  # transparently reconnects whenever that connection process exits.
  use GenServer
  require Logger

  def start_link(opts) do
    opts = Enum.into(opts, %{})
    GenServer.start_link(__MODULE__, opts)
  end

  @impl true
  def init(opts) do
    # Trap exits so a dying connection arrives as an :EXIT message
    # instead of taking this process down with it.
    Process.flag(:trap_exit, true)
    {:ok, conn} = connect(opts)

    {:ok,
     %{
       conn: conn,
       opts: opts
     }}
  end

  @impl true
  def handle_info({:EXIT, pid, _reason}, state = %{conn: pid}) do
    # The active connection died - establish a fresh one.
    {:ok, conn} = connect(state.opts)
    {:noreply, Map.put(state, :conn, conn)}
  end

  # Ignore exits from stale connections and any other stray messages.
  # Without this catch-all an unexpected message would crash the server
  # with a FunctionClauseError.
  def handle_info(_msg, state), do: {:noreply, state}

  # Opens the websocket to the primary node and authorizes it with the
  # shared token before handing the connection back.
  def connect(opts) do
    Logger.info("Connecting to Riptide at #{opts.url}")
    {:ok, conn} = Riptide.Websocket.Client.start_link([url: opts.url], name: opts.name)
    {:ok, _} = Riptide.Connection.call(conn, "riptide.store.upgrade", opts.token)
    {:ok, conn}
  end
end
|
packages/elixir/lib/riptide/store/store_riptide.ex
| 0.891876
| 0.797675
|
store_riptide.ex
|
starcoder
|
defmodule Mix.Tasks.Dialyzer do
  @shortdoc "Runs dialyzer with default or project-defined flags."
  @moduledoc """
  This task compiles the mix project, creates a PLT with dependencies if needed and runs `dialyzer`. Much of its behavior can be managed in configuration as described below.

  If executed outside of a mix project, it will build the core PLT files and exit.

  ## Command line options

    * `--no-compile` - do not compile even if needed.
    * `--no-check` - do not perform (quick) check to see if PLT needs updated.
    * `--halt-exit-status` - exit immediately with same exit status as dialyzer.
      useful for CI. do not use with `mix do`.
    * `--plt` - only build the required plt(s) and exit.

  Warning flags passed to this task are passed on to `:dialyzer`.

  e.g. `mix dialyzer --unmatched_returns`

  ## Configuration

  All configuration is included under a dialyzer key in the mix project keyword list.

  ### Flags

  You can specify any `dialyzer` command line argument with the :flags keyword.

  Dialyzer supports a number of warning flags used to enable or disable certain kinds of analysis features. Until version 0.4, `dialyxir` used by default the additional warning flags shown in the example below. However some of these create warnings that are often more confusing than helpful, particularly to new users of Dialyzer. As of 0.4, there are no longer any flags used by default. To get the old behavior, specify them in your Mix project file. For compatibility reasons you can use either the `-Wwarning` convention of the dialyzer CLI, or (preferred) the `WarnOpts` atoms supported by the [API](http://erlang.org/doc/man/dialyzer.html#gui-1). e.g.

  ```elixir
  def project do
    [ app: :my_app,
      version: "0.0.1",
      deps: deps,
      dialyzer: [ flags: ["-Wunmatched_returns", :error_handling, :underspecs]]
    ]
  end
  ```

  ### PLT Configuration

  The task will build a PLT with default core Erlang applications: `:erts :kernel :stdlib :crypto` and re-use this core file in multiple projects - another core file is created for Elixir.

  OTP application dependencies are (transitively) added to your project's PLT by default. The applications added are the same as you would see displayed with the command `mix app.tree`. There is also a `:plt_add_deps` option you can set to control the dependencies added. The following options are supported:

    * :project - Direct Mix and OTP dependencies
    * :apps_direct - Only Direct OTP application dependencies - not the entire tree
    * :transitive - Include Mix and OTP application dependencies recursively
    * :app_tree - Transitive OTP application dependencies e.g. `mix app.tree` (default)

  ```
  def project do
    [ app: :my_app,
      version: "0.0.1",
      deps: deps,
      dialyzer: [plt_add_deps: :apps_direct, plt_add_apps: :wx]
    ]
  end
  ```

  You can also configure applications to include in the PLT more directly:

    * `dialyzer: :plt_add_apps` - applications to include
      *in addition* to the core applications and project dependencies.
    * `dialyzer: :plt_apps` - a list of applications to include that will replace the default,
      include all the apps you need e.g.

  ### Other Configuration

    * `dialyzer: :plt_file` - Deprecated - specify the plt file name to create and use - default is to create one in the project's current build environment (e.g. _build/dev/) specific to the Erlang/Elixir version used. Note that use of this key in version 0.4 or later will produce a deprecation warning - you can silence the warning by providing a pair with key :no_warn e.g. `plt_file: {:no_warn,"filename"}`.
    * `dialyzer: :plt_core_path` - specify an alternative to MIX_HOME to use to store the Erlang and Elixir core files.
    * `dialyzer: :ignore_warnings` - specify file path to filter well-known warnings.
  """
  use Mix.Task
  import System, only: [user_home!: 0]
  alias Dialyxir.Project
  alias Dialyxir.Plt
  alias Dialyxir.Dialyzer

  # Warnings always passed to :dialyzer in addition to configured flags
  # (minus any "removed defaults" the project declares).
  @default_warnings [ :unknown ]

  # Command line switches recognized by OptionParser in run/1.
  @command_options [ no_compile: :boolean,
                     no_check: :boolean,
                     halt_exit_status: :boolean,
                     plt: :boolean ]

  # Entry point for `mix dialyzer`: verifies dialyzer is installed,
  # compiles the project (unless --no-compile), refreshes PLTs when
  # required, then runs the analysis. Outside of a mix project only the
  # core PLTs are checked.
  def run(args) do
    check_dialyzer()
    compatibility_notice()

    if Mix.Project.get() do
      Project.check_config()
      {opts, _, dargs} = OptionParser.parse(args, strict: @command_options)
      unless opts[:no_compile], do: Mix.Project.compile([])
      _ = unless no_check?(opts), do: check_plt()
      unless opts[:plt], do: run_dialyzer(opts, dargs)
    else
      IO.puts "No mix project found - checking core PLTs..."
      Project.plts_list([], false) |> Plt.check()
    end
  end

  # Decides whether the PLT freshness check should be skipped. Umbrella
  # children always skip it and defer to the parent's PLT, building it
  # first when it does not exist yet.
  defp no_check?(opts) do
    case {in_child?(), no_plt?()} do
      {true, true} ->
        IO.puts "In an Umbrella child and no PLT found - building that first."
        build_parent_plt()
        true

      {true, false} ->
        IO.puts "In an Umbrella child, not checking PLT..."
        true

      _ -> opts[:no_check]
    end
  end

  # Rebuilds the PLTs only when the dependency hash changed since the
  # last successful build; the hash is persisted next to the PLT file.
  defp check_plt() do
    IO.puts "Checking PLT..."
    {apps, hash} = dependency_hash()

    if check_hash?(hash) do
      IO.puts "PLT is up to date!"
    else
      Project.plts_list(apps) |> Plt.check()
      File.write(plt_hash_file(), hash)
    end
  end

  # Invokes :dialyzer (via Dialyxir.Dialyzer) against the project PLT and
  # prints each finding; optionally halts with dialyzer's exit status.
  defp run_dialyzer(opts, dargs) do
    args = [ { :check_plt, false },
             { :init_plt, String.to_charlist(Project.plt_file()) },
             { :files_rec, Project.dialyzer_paths() },
             { :warnings, dialyzer_warnings(dargs) } ]

    IO.puts "Starting Dialyzer"
    # NOTE(review): this looks like leftover debug output - confirm the
    # argument dump is meant to be printed on every run.
    IO.inspect args, label: "dialyzer args"
    { _, exit_status, result } = Dialyzer.dialyze(args)
    Enum.each(result, &IO.puts/1)
    if opts[:halt_exit_status], do: :erlang.halt(exit_status)
  end

  # Combines project flags, extra CLI flags, and the defaults. Note that
  # ++ and -- are right-associative in Elixir, so the removed defaults
  # are subtracted from @default_warnings only, not from the user flags.
  defp dialyzer_warnings(dargs) do
    raw_opts = Project.dialyzer_flags() ++ Enum.map(dargs, &elem(&1,0))
    transform(raw_opts) ++ @default_warnings -- Project.dialyzer_removed_defaults()
  end

  # Normalizes warning flags: lists map element-wise, atoms pass through,
  # and CLI strings like "-Wunmatched_returns" / "--flag" become atoms.
  defp transform(options) when is_list(options), do: Enum.map(options, &transform/1)
  defp transform(option) when is_atom(option), do: option

  defp transform(option) when is_binary(option) do
    option
    |> String.replace_leading("-W", "")
    |> String.replace("--", "")
    |> String.to_atom()
  end

  # An umbrella child's lockfile lives in the parent directory, so ".."
  # in the configured lockfile path marks us as running inside a child.
  defp in_child? do
    String.contains?(Mix.Project.config[:lockfile], "..")
  end

  # True when the dependency PLT has not been built yet.
  defp no_plt? do
    not File.exists?(Project.deps_plt())
  end

  # Runs `mix dialyzer --plt` in the umbrella parent project directory,
  # streaming its output to this task's stdout.
  defp build_parent_plt() do
    parent = Mix.Project.config[:lockfile] |> Path.expand |> Path.dirname
    opts = [ into: IO.stream(:stdio, :line),
             stderr_to_stdout: true,
             cd: parent ]
    # It would seem more natural to use Mix.in_project here to start in our parent project.
    # However part of the app.tree resolution includes loading all sub apps, and we will
    # hit an exception when we try to do that for *this* child, which is already loaded.
    {out, rc} = System.cmd("mix", ["dialyzer", "--plt"], opts)

    if rc != 0 do
      IO.puts("Error building parent PLT, process returned code: #{rc}\n#{out}")
    end
  end

  # Aborts with an explanatory message when the Erlang :dialyzer
  # application is missing (common with OS-packaged Erlang installs).
  defp check_dialyzer do
    if not Code.ensure_loaded?(:dialyzer) do
      IO.puts """
      DEPENDENCY MISSING
      ------------------------
      If you are reading this message, then Elixir and Erlang are installed but the
      Erlang Dialyzer is not available. Probably this is because you installed Erlang
      with your OS package manager and the Dialyzer package is separate.
      On Debian/Ubuntu:
      `apt-get install erlang-dialyzer`
      Fedora:
      `yum install erlang-dialyzer`
      Arch and Homebrew include Dialyzer in their base erlang packages. Please report a Github
      issue to add or correct distribution-specific information.
      """
      :erlang.halt(3)
    end
  end

  # Warns users upgrading from pre-0.4 dialyxir about changed defaults.
  # NOTE(review): File.exists?/1 does not expand the "*" wildcard, so
  # this check only matches a file literally named with a "*" in it;
  # verify whether Path.wildcard/1 was intended here.
  defp compatibility_notice do
    old_plt = "#{user_home!()}/.dialyxir_core_*.plt"

    if File.exists?(old_plt) && (!File.exists?(Project.erlang_plt()) || !File.exists?(Project.elixir_plt())) do
      IO.puts """
      COMPATIBILITY NOTICE
      ------------------------
      Previous usage of a pre-0.4 version of Dialyxir detected. Please be aware that the 0.4 release
      makes a number of changes to previous defaults. Among other things, the PLT task is automatically
      run when dialyzer is run, PLT paths have changed,
      transitive dependencies are included by default in the PLT, and no additional warning flags
      beyond the dialyzer defaults are included. All these properties can be changed in configuration.
      (see `mix help dialyzer`).
      If you no longer use the older Dialyxir in any projects and do not want to see this notice each time you upgrade your Erlang/Elixir distribution, you can delete your old pre-0.4 PLT files. ( rm ~/.dialyxir_core_*.plt )
      """
    end
  end

  # Compares the stored dependency hash with the freshly computed one.
  @spec check_hash?(binary()) :: boolean()
  defp check_hash?(hash) do
    case File.read(plt_hash_file()) do
      {:ok, stored_hash} -> hash == stored_hash
      _ -> false
    end
  end

  # Sidecar file that records the dependency hash for the current PLT.
  defp plt_hash_file, do: Project.plt_file() <> ".hash"

  # Hashes the lockfile plus the resolved application list so PLT
  # rebuilds are triggered whenever dependencies change.
  # NOTE(review): IO.inspect prints the app list on every run - confirm
  # this is intended output rather than leftover debugging.
  @spec dependency_hash :: {[atom()], binary()}
  def dependency_hash do
    lock_file = Mix.Dep.Lock.read |> :erlang.term_to_binary
    apps = Project.cons_apps |> IO.inspect
    hash = :crypto.hash(:sha, lock_file <> :erlang.term_to_binary(apps))
    {apps, hash}
  end
end
|
deps/dialyxir/lib/mix/tasks/dialyzer.ex
| 0.81468
| 0.795777
|
dialyzer.ex
|
starcoder
|
defmodule Square.Devices do
  @moduledoc """
  Client functions for the Square Devices API, which manages device codes
  used to sign Square Terminal hardware in to connected terminal mode.

  Every function takes a configured `Tesla.Client.t()` (see `Square.client`)
  and returns `{:ok, %Tesla.Env{}}` or `{:error, reason}`.
  """

  @doc """
  Lists all DeviceCodes associated with the merchant.

  Optional query parameters:

    * `:cursor` - pagination cursor returned by a previous call; provide it
      to retrieve the next set of results
    * `:location_id` - only return DeviceCodes of the given location
    * `:product_type` - only return DeviceCodes targeting the given product type

  ## Example

      iex> Square.client |> Square.Devices.list_device_codes()
  """
  @spec list_device_codes(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
  def list_device_codes(client, params \\ []) do
    Tesla.get(client, "devices/codes", query: params)
  end

  @doc """
  Creates a DeviceCode that can be used to login to a Square Terminal
  device to enter the connected terminal mode.

  `body` is a Create Device Code Request map.

  ## Example

      iex> body = %{
      ...>   idempotency_key: "01bb00a6-0c86-4770-94ed-f5fca973cd56",
      ...>   device_code: %{
      ...>     name: "Counter 1",
      ...>     product_type: "TERMINAL_API",
      ...>     location_id: "B5E4484SHHNYH"
      ...>   }
      ...> }
      iex> Square.client |> Square.Devices.create_device_code(body)
  """
  @spec create_device_code(Tesla.Client.t(), map) :: {:error, any} | {:ok, Tesla.Env.t()}
  def create_device_code(client, body \\ %{}) do
    Tesla.post(client, "devices/codes", body)
  end

  @doc """
  Retrieves the DeviceCode with the given unique `id`.

  ## Example

      iex> Square.client |> Square.Devices.get_device_code(id)
  """
  @spec get_device_code(Tesla.Client.t(), binary) :: {:error, any} | {:ok, Tesla.Env.t()}
  def get_device_code(client, id) do
    Tesla.get(client, "devices/codes/#{id}")
  end
end
|
lib/api/devices_api.ex
| 0.92761
| 0.810028
|
devices_api.ex
|
starcoder
|
defmodule Hextille.HexGrid do
  require Integer
  alias Hextille.Offset
  alias Hextille.Cube

  @moduledoc """
  Module for creating HexGrids
  """

  @doc ~S"""
  Returns a MapSet containing a rectangular grid of hexagon tiles in Cube
  coordinates.

  Note that row and column indices run from 0 to the given bound
  *inclusive*, so the result spans `rows + 1` rows and `columns + 1`
  columns - e.g. `create(4, 4, -2, -2)` covers columns -2..2 (see the
  doctests of `boundaries/1`).

  Top left corner of the grid is at %Cube{q: 0, r: 0, s: 0}, unless offset is shifted.
  """
  def create(rows, columns, offset_col \\ 0, offset_row \\ 0) do
    create_grid(MapSet.new, rows, columns, offset_col, offset_row)
  end

  @doc ~S"""
  Finds the columns and rows representing boundaries of a MapSet.

      iex> a = HexGrid.create(4, 4, -2, -2)
      iex> b = HexGrid.create(4, 4, -3, -5)
      iex> HexGrid.boundaries(a)
      %{max_col: 2, max_row: 2, min_col: -2, min_row: -2}
      iex> HexGrid.boundaries(b)
      %{max_col: 1, max_row: -1, min_col: -3, min_row: -5}
  """
  def boundaries(%MapSet{} = mapset) do
    %{:min_col => mapset |> min_col,
      :max_col => mapset |> max_col,
      :min_row => mapset |> min_row,
      :max_row => mapset |> max_row}
  end

  # Extrema are computed on the offset-coordinate projection of the grid.
  defp min_col(%MapSet{} = mapset), do: Enum.min_by(cube_to_offset_mapset(mapset), fn(x) -> x.col end).col
  defp max_col(%MapSet{} = mapset), do: Enum.max_by(cube_to_offset_mapset(mapset), fn(x) -> x.col end).col
  defp min_row(%MapSet{} = mapset), do: Enum.min_by(cube_to_offset_mapset(mapset), fn(x) -> x.row end).row
  defp max_row(%MapSet{} = mapset), do: Enum.max_by(cube_to_offset_mapset(mapset), fn(x) -> x.row end).row

  @doc ~S"""
  Checks if hexagon has any neighbour in direction at MapSet

      iex> h = HexGrid.create(4, 4, -2, -2)
      iex> HexGrid.has_neighbour(h, %Cube{q: 1, r: -2, s: 1}, :east)
      true
      iex> HexGrid.has_neighbour(h, %Cube{q: 1, r: -2, s: 1}, :north_east)
      false
  """
  def has_neighbour(%MapSet{} = mapset, %Cube{} = h, direction) do
    mapset
    |> MapSet.member?(Cube.neighbour(h, direction))
  end

  @doc ~S"""
  Checks if hexagon has any neighbours in MapSet

      iex> h = HexGrid.create(4, 4, -2, -2)
      iex> HexGrid.has_neighbour(h, %Cube{q: 1, r: -2, s: 1})
      %{east: true, north_east: false, north_west: false, south_east: true,
        south_west: true, west: true}
  """
  def has_neighbour(%MapSet{} = mapset, %Cube{} = h) do
    %{:north_east => has_neighbour(mapset, h, :north_east),
      :east => has_neighbour(mapset, h, :east),
      :south_east => has_neighbour(mapset, h, :south_east),
      :south_west => has_neighbour(mapset, h, :south_west),
      :west => has_neighbour(mapset, h, :west),
      :north_west => has_neighbour(mapset, h, :north_west)}
  end

  # Base case: the row index 0 is included, recursion stops below it.
  defp create_grid(mapset, rows, _columns, _offset_col, _offset_row) when rows < 0 do
    mapset
  end

  # Adds one full row of tiles, then recurses with the next lower row.
  defp create_grid(mapset, rows, columns, offset_col, offset_row) do
    mapset = create_row(mapset, rows, columns, offset_col, offset_row)
    create_grid(mapset, rows - 1, columns, offset_col, offset_row)
  end

  # Base case: the column index 0 is included as well.
  defp create_row(mapset, _rows, columns, _offset_col, _offset_row) when columns < 0 do
    mapset
  end

  # Converts the (col, row) offset coordinate to a Cube tile and recurses
  # toward column 0.
  defp create_row(mapset, rows, columns, offset_col, offset_row) do
    hexagon = Offset.roffset_to_cube(%Offset{
      col: columns + offset_col, row: rows + offset_row})

    mapset
    |> MapSet.put(hexagon)
    |> create_row(rows, columns - 1, offset_col, offset_row)
  end

  # Projects every Cube tile to its Offset (col/row) representation;
  # returns a list, which is all the min/max helpers need.
  defp cube_to_offset_mapset(%MapSet{} = mapset) do
    Enum.map(mapset, fn(h) -> Offset.roffset_from_cube(h) end)
  end
end
|
lib/hexgrid.ex
| 0.727589
| 0.598342
|
hexgrid.ex
|
starcoder
|
defmodule APIac.Authenticator do
  @moduledoc """
  Specification for authenticator plug

  An authenticator is in charge of extracting and validating credentials. It can
  also returns an error indicating how to authenticate, giving information such as
  authentication scheme to use, etc.
  """

  # Implementation-specific plug options (the value returned by Plug.init/1).
  @type opts :: any()
  # Implementation-specific credential representation.
  @type credentials :: any()

  @doc """
  Extract the credentials from the `Plug.Conn` object

  Returns `{:ok, Plug.Conn.t, credentials}` if credentials were found. It is required
  to return the `Plug.Conn` object since some things can be fetched in the process
  (e.g. the HTTP body). The format of `credentials` is specific to an `APIac.Authenticator`

  Returns `{:error, Plug.Conn.t, %APIac.Authenticator.Unauthorized{}}` if no
  credentials were found or credential extraction failed (because request is malformed,
  parameters are non-standard, or any other reason). When, and only when credentials
  are not present in the request, the `reason` field of the
  `%APIac.Authenticator.Unauthorized{}` shall be set to the atom `:credentials_not_found`.

  The semantics are the following:
  - if credentials were *not* found, the HTTP `WWW-Authenticate` can be set to advertise the
  calling client of the available authentication scheme
  - if credentials were found but an error happens when extracting it, that is an error
  (since the client tried to authenticate) and the plug pipeline execution should be
  stopped

  The `opts` parameter is the value returned by `Plug.init/1`
  """
  @callback extract_credentials(Plug.Conn.t(), opts) ::
              {:ok, Plug.Conn.t(), credentials}
              | {:error, Plug.Conn.t(), %APIac.Authenticator.Unauthorized{}}

  @doc """
  Validate credentials previously extracted by `c:extract_credentials/2`

  Returns `{:ok, Plug.Conn.t` if credentials are valid. It is required
  to return the `Plug.Conn` object since some things can be fetched in the process
  (e.g. the HTTP body).

  Returns `{:error, Plug.Conn.t, %APIac.Authenticator.Unauthorized{}}` if
  credentials are invalid

  The `opts` parameter is the value returned by `Plug.init/1`
  """
  @callback validate_credentials(Plug.Conn.t(), credentials, opts) ::
              {:ok, Plug.Conn.t()} | {:error, Plug.Conn.t(), %APIac.Authenticator.Unauthorized{}}

  @doc """
  Sets the HTTP error response and halts the plug

  Typically, the error is returned as:
  - An error status code (e.g. '401 Unauthorized')
  - `WWW-Authenticate` standard HTTP header

  Specifically, it may set the headers, HTTP status code and HTTP body, depending on:
  - The `#{__MODULE__}`
  - The `opts[:error_response_verbosity]` function

  Specifics are to be documented in implementation plugs

  The `opts` parameter is the value returned by `Plug.init/1`
  """
  @callback send_error_response(Plug.Conn.t(), %APIac.Authenticator.Unauthorized{}, opts) ::
              Plug.Conn.t()

  defmodule Unauthorized do
    @moduledoc """
    Exception returned (or raised) when authentication fails.

    Fields:
    - `:authenticator` - the authenticator module that attempted authentication
    - `:reason` - why it failed (e.g. `:credentials_not_found`)
    """
    defexception [:authenticator, :reason]

    @doc "Builds the exception from the authenticator module and a failure reason."
    def exception(authenticator, reason) do
      %__MODULE__{authenticator: authenticator, reason: reason}
    end

    @doc "Formats the exception as `\"authenticator: reason\"`."
    def message(%__MODULE__{authenticator: authenticator, reason: reason}) do
      "#{authenticator}: #{reason}"
    end
  end
end
|
lib/apiac/authenticator.ex
| 0.921101
| 0.5
|
authenticator.ex
|
starcoder
|
defmodule Protobuf.Wire do
  @moduledoc """
  Utilities to convert data from wire format to protobuf and back.
  """

  alias Protobuf.Wire.{Varint, Zigzag}

  require Logger

  @type proto_type ::
          :int32
          | :int64
          | :fixed32
          | :fixed64
          | :uint32
          | :uint64
          | :sfixed32
          | :sfixed64
          | :sint32
          | :sint64
          | :float
          | :double
          | :bool
          | :string
          | :bytes
          | {:enum, any}

  @type proto_float :: :infinity | :negative_infinity | :nan | float
  @type proto_value :: binary | integer | boolean | proto_float | atom

  # Legal value ranges for the integer wire types.
  @sint32_range -0x80000000..0x7FFFFFFF
  @sint64_range -0x8000000000000000..0x7FFFFFFFFFFFFFFF
  @uint32_range 0..0xFFFFFFFF
  @uint64_range 0..0xFFFFFFFFFFFFFFFF

  @doc """
  Encodes a value of the given protobuf `type` into wire-format iodata.

  Raises `Protobuf.TypeEncodeError` when the value is out of range or
  otherwise invalid for the type.
  """
  @spec from_proto(proto_type, proto_value) :: iodata
  # Returns improper list, but still valid iodata.
  def from_proto(type, binary) when type in [:string, :bytes] do
    len = binary |> IO.iodata_length() |> Varint.encode()
    len ++ binary
  end

  def from_proto(:int32, n) when n in @sint32_range, do: Varint.encode(n)
  def from_proto(:int64, n) when n in @sint64_range, do: Varint.encode(n)
  def from_proto(:uint32, n) when n in @uint32_range, do: Varint.encode(n)
  def from_proto(:uint64, n) when n in @uint64_range, do: Varint.encode(n)
  def from_proto(:bool, true), do: Varint.encode(1)
  def from_proto(:bool, false), do: Varint.encode(0)
  def from_proto({:enum, enum}, key) when is_atom(key), do: Varint.encode(enum.value(key))
  def from_proto({:enum, _}, n) when is_integer(n), do: Varint.encode(n)

  # IEEE-754 special values are emitted as fixed little-endian byte lists.
  def from_proto(:float, :infinity), do: [0, 0, 128, 127]
  def from_proto(:float, :negative_infinity), do: [0, 0, 128, 255]
  def from_proto(:float, :nan), do: [0, 0, 192, 127]
  def from_proto(:float, n), do: <<n::32-float-little>>
  def from_proto(:double, :infinity), do: [0, 0, 0, 0, 0, 0, 240, 127]
  def from_proto(:double, :negative_infinity), do: [0, 0, 0, 0, 0, 0, 240, 255]
  def from_proto(:double, :nan), do: [1, 0, 0, 0, 0, 0, 248, 127]
  def from_proto(:double, n), do: <<n::64-float-little>>

  def from_proto(:sint32, n) when n in @sint32_range, do: Varint.encode(Zigzag.encode(n))
  def from_proto(:sint64, n) when n in @sint64_range, do: Varint.encode(Zigzag.encode(n))
  def from_proto(:fixed32, n) when n in @uint32_range, do: <<n::32-little>>
  def from_proto(:fixed64, n) when n in @uint64_range, do: <<n::64-little>>
  def from_proto(:sfixed32, n) when n in @sint32_range, do: <<n::32-signed-little>>
  def from_proto(:sfixed64, n) when n in @sint64_range, do: <<n::64-signed-little>>

  def from_proto(type, n) do
    raise Protobuf.TypeEncodeError, message: "#{inspect(n)} is invalid for type #{type}"
  end

  @doc """
  Decodes a wire-format value into the given protobuf `type`.

  Raises `Protobuf.DecodeError` when the value cannot be decoded.
  """
  @spec to_proto(proto_type, binary | integer) :: proto_value
  def to_proto(type, val) when type in [:string, :bytes], do: val

  # Varint-decoded integers are reinterpreted at their declared width.
  def to_proto(:int32, val) do
    <<n::signed-integer-32>> = <<val::32>>
    n
  end

  def to_proto(:int64, val) do
    <<n::signed-integer-64>> = <<val::64>>
    n
  end

  def to_proto(:uint32, val) do
    <<n::unsigned-integer-32>> = <<val::32>>
    n
  end

  def to_proto(:uint64, val) do
    <<n::unsigned-integer-64>> = <<val::64>>
    n
  end

  def to_proto(:bool, val), do: val != 0

  def to_proto({:enum, enum}, val) do
    enum.key(val)
  rescue
    FunctionClauseError ->
      Logger.warn("unknown enum value #{val} when decoding for #{inspect(enum)}")
      val
  end

  def to_proto(:float, <<n::little-float-32>>), do: n
  # little endianness, should be 0b0_11111111_000000000...
  def to_proto(:float, <<0, 0, 0b1000_0000::8, 0b01111111::8>>), do: :infinity
  # little endianness, should be 0b1_11111111_000000000...
  def to_proto(:float, <<0, 0, 0b1000_0000::8, 0b11111111::8>>), do: :negative_infinity
  # NaN: exponent all ones with a non-zero mantissa. In little-endian
  # order the low exponent bit is the top bit of byte 2, the other seven
  # exponent bits sit in byte 3 below the sign; `a` and `b` together hold
  # the mantissa. Should be 0b*_11111111_not_zero...
  def to_proto(:float, <<a::16, 1::1, b::7, _::1, 0b1111111::7>>) when a != 0 or b != 0,
    do: :nan

  def to_proto(:double, <<n::little-float-64>>), do: n
  # little endianness, should be 0b0_11111111111_000000000...
  def to_proto(:double, <<0::48, 0b1111::4, 0::4, 0b01111111::8>>), do: :infinity
  # little endianness, should be 0b1_11111111111_000000000...
  def to_proto(:double, <<0::48, 0b1111::4, 0::4, 0b11111111::8>>), do: :negative_infinity
  # NaN: exponent all ones with a non-zero mantissa (`a` holds the low 48
  # mantissa bits, `b` the top 4).
  def to_proto(:double, <<a::48, 0b1111::4, b::4, _::1, 0b1111111::7>>) when a != 0 or b != 0,
    do: :nan

  def to_proto(type, val) when type in [:sint32, :sint64], do: Zigzag.decode(val)
  def to_proto(:fixed32, <<n::little-32>>), do: n
  def to_proto(:fixed64, <<n::little-64>>), do: n
  def to_proto(:sfixed32, <<n::little-signed-32>>), do: n
  def to_proto(:sfixed64, <<n::little-signed-64>>), do: n

  def to_proto(type, val) do
    raise Protobuf.DecodeError, message: "can't decode #{inspect(val)} into type #{type}"
  end
end
|
lib/protobuf/wire.ex
| 0.815012
| 0.456107
|
wire.ex
|
starcoder
|
defmodule Livebook.Runtime.Dependencies do
  @moduledoc false

  @doc """
  Finds or adds a `Mix.install/2` call to `code` and modifies it to
  include the given Mix deps.
  """
  @spec add_mix_deps(String.t(), list(tuple())) :: {:ok, String.t()} | {:error, String.t()}
  def add_mix_deps(code, deps) do
    with {:ok, ast, comments} <- string_to_quoted_with_comments(code),
         {:ok, ast} <- insert_deps(ast, deps),
         do: {:ok, format(ast, comments)}
  end

  # Parses source into an AST while keeping comments and literal/token
  # metadata, so the code can be re-printed without losing formatting.
  defp string_to_quoted_with_comments(code) do
    try do
      to_quoted_opts = [
        # Wrap literals in __block__ nodes so their metadata survives.
        literal_encoder: &{:ok, {:__block__, &2, [&1]}},
        token_metadata: true,
        unescape: false
      ]

      {ast, comments} = Code.string_to_quoted_with_comments!(code, to_quoted_opts)
      {:ok, ast, comments}
    rescue
      error -> {:error, Exception.format(:error, error)}
    end
  end

  # Extends an existing Mix.install/2 call when one is present; otherwise
  # prepends a brand new Mix.install/2 call to the top of the AST.
  defp insert_deps(ast, deps) do
    with :error <- update_install(ast, deps) do
      dep_nodes = Enum.map(deps, &dep_node/1)

      install_node =
        {{:., [], [{:__aliases__, [], [:Mix]}, :install]}, [],
         [{:__block__, [newlines: 1], [dep_nodes]}]}

      {:ok, prepend_node(ast, install_node)}
    end
  end

  # Pretty-prints the AST (with its comments) back to source, 90 columns.
  defp format(ast, comments) do
    ast
    |> Code.quoted_to_algebra(comments: comments)
    |> Inspect.Algebra.format(90)
    |> IO.iodata_to_binary()
  end

  # Puts `node` first, either inside an existing top-level block or by
  # wrapping a single-expression AST in a new block.
  defp prepend_node({:__block__, meta, nodes}, node) do
    {:__block__, meta, [node | nodes]}
  end

  defp prepend_node(ast, node) do
    {:__block__, [], [node, ast]}
  end

  # Matches a Mix.install/2 call directly and appends any deps that are
  # not already listed in its dependency list.
  defp update_install(
         {{:., _, [{:__aliases__, _, [:Mix]}, :install]} = target, meta1,
          [{:__block__, meta2, [dep_nodes]} | args]},
         deps
       ) do
    new_dep_nodes = for dep <- deps, not has_dep?(dep_nodes, dep), do: dep_node(dep)
    {:ok, {target, meta1, [{:__block__, meta2, [dep_nodes ++ new_dep_nodes]} | args]}}
  end

  # Walks a top-level block and updates only the first Mix.install call
  # found; returns :error when no call exists anywhere in the block.
  defp update_install({:__block__, meta, nodes}, deps) do
    {nodes, found} =
      Enum.map_reduce(nodes, _found = false, fn
        node, false ->
          case update_install(node, deps) do
            {:ok, node} -> {node, true}
            _ -> {node, false}
          end

        node, true ->
          {node, true}
      end)

    if found do
      {:ok, {:__block__, meta, nodes}}
    else
      :error
    end
  end

  defp update_install(_node, _deps), do: :error

  # Checks whether a dep with the same package name (element 0 of the dep
  # tuple) is already present; handles both 2-tuples and {:{}, ...} AST
  # tuples of arity 3+.
  defp has_dep?(deps, dep) do
    name = elem(dep, 0)

    Enum.any?(deps, fn
      {:__block__, _, [{{:__block__, _, [^name]}, _}]} -> true
      {:{}, _, [{:__block__, _, [^name]} | _]} -> true
      _ -> false
    end)
  end

  # Escapes a dep tuple into a literal AST node in the wrapped form the
  # literal_encoder above produces.
  defp dep_node(dep), do: {:__block__, [], [Macro.escape(dep)]}

  @doc """
  Parses a plain Elixir term from its string representation.

  ## Examples

      iex> Livebook.Runtime.Dependencies.parse_term(~s|{:jason, "~> 1.3.0"}|)
      {:ok, {:jason, "~> 1.3.0"}}

      iex> Livebook.Runtime.Dependencies.parse_term(~s|{:jason, "~> 1.3.0", runtime: false, meta: 'data'}|)
      {:ok, {:jason, "~> 1.3.0", runtime: false, meta: 'data'}}

      iex> Livebook.Runtime.Dependencies.parse_term(~s|%{name: "Jake", numbers: [1, 2, 3.4]}|)
      {:ok, %{name: "Jake", numbers: [1, 2, 3.4]}}

      iex> Livebook.Runtime.Dependencies.parse_term(~s|{:call, Enum.count([])}|)
      :error

      iex> Livebook.Runtime.Dependencies.parse_term(~s|Enum|)
      :error
  """
  @spec parse_term(String.t()) :: {:ok, term()} | :error
  def parse_term(string) do
    case Code.string_to_quoted(string) do
      {:ok, ast} -> unescape_term(ast)
      {:error, _} -> :error
    end
  end

  # Converts quoted AST back into a plain term, accepting only literal
  # shapes (atoms, numbers, binaries, lists, tuples, maps). Any call or
  # alias node makes the whole parse fail, so no code is ever evaluated -
  # this is what makes parse_term/1 safe on untrusted input.
  defp unescape_term(node)
       when is_atom(node)
       when is_integer(node)
       when is_float(node)
       when is_binary(node),
       do: {:ok, node}

  defp unescape_term([]), do: {:ok, []}

  defp unescape_term([head | tail]) do
    with {:ok, head} <- unescape_term(head),
         {:ok, tail} <- unescape_term(tail),
         do: {:ok, [head | tail]}
  end

  defp unescape_term({left, right}) do
    with {:ok, left} <- unescape_term(left),
         {:ok, right} <- unescape_term(right),
         do: {:ok, {left, right}}
  end

  defp unescape_term({:{}, _, nodes}) do
    with {:ok, terms} <- unescape_term(nodes), do: {:ok, List.to_tuple(terms)}
  end

  defp unescape_term({:%{}, _, nodes}) do
    with {:ok, terms} <- unescape_term(nodes), do: {:ok, Map.new(terms)}
  end

  defp unescape_term(_node), do: :error

  @doc """
  Implements `Livebook.Runtime.search_packages/3` on top of
  `search_hex/2`.
  """
  @spec search_packages_on_hex(pid(), String.t()) :: reference()
  def search_packages_on_hex(send_to, search) do
    ref = make_ref()

    # Run the HTTP search off-process and deliver the response as a
    # message tagged with the returned ref.
    Task.Supervisor.start_child(Livebook.TaskSupervisor, fn ->
      response = search_hex(search)
      send(send_to, {:runtime_search_packages_response, ref, response})
    end)

    ref
  end

  @doc """
  Implements `Livebook.Runtime.search_packages/3` by searching
  through the given list of packages.
  """
  @spec search_packages_in_list(
          list(Livebook.Runtime.package()),
          pid(),
          String.t()
        ) :: reference()
  def search_packages_in_list(packages, send_to, search) do
    ref = make_ref()
    # Simple prefix match on the package name.
    packages = Enum.filter(packages, &String.starts_with?(&1.name, search))
    send(send_to, {:runtime_search_packages_response, ref, {:ok, packages}})
    ref
  end

  @doc """
  Searches for packages on Hex.

  ## Options

    * `:api_url` - the base URL for Hex API requests. Optional
  """
  @spec search_hex(String.t(), keyword()) :: Livebook.Runtime.search_packages_response()
  def search_hex(search, opts \\ [])

  def search_hex("", _opts), do: {:ok, []}

  def search_hex(search, opts) do
    api_url = opts[:api_url] || "https://hex.pm/api"
    # Prefix search on package name, most-downloaded first.
    params = %{"search" => "name:#{search}*", "sort" => "downloads"}
    url = api_url <> "/packages?" <> URI.encode_query(params)

    case Livebook.Utils.HTTP.request(:get, url) do
      {:ok, status, _headers, body} ->
        with 200 <- status, {:ok, packages} <- Jason.decode(body) do
          packages = Enum.map(packages, &parse_package/1)
          {:ok, packages}
        else
          _ -> {:error, "unexpected response"}
        end

      {:error, reason} ->
        {:error, "failed to make a request, reason: #{inspect(reason)}"}
    end
  end

  # Maps a Hex API package payload into the internal package map; the
  # dependency tuple comes from parsing the mix.exs snippet Hex provides.
  defp parse_package(package) do
    {:ok, dependency} = parse_term(package["configs"]["mix.exs"])

    %{
      name: package["name"],
      version: package["latest_stable_version"] || package["latest_version"],
      description: package["meta"]["description"],
      url: package["html_url"],
      dependency: dependency
    }
  end
end
|
lib/livebook/runtime/dependencies.ex
| 0.83363
| 0.439807
|
dependencies.ex
|
starcoder
|
defmodule Grizzly.CommandClass.TimeParameters.Set do
  @moduledoc """
  Command module for working with TIME_PARAMETERS SET command

  command options:

    * `:year` - Year in the usual Gregorian calendar
    * `:month` - Month of the year between 01 (January) and 12 (December)
    * `:day` - Day of the month between 01 and 31
    * `:hour` - Number of complete hours that have passed since midnight (00..23) in UTC
    * `:minute` - Number of complete minutes that have passed since the start of the hour (00..59) in UTC
    * `:second` - Number of complete seconds since the start of the minute (00..59) in UTC
    * `:seq_number` - The sequence number for the Z/IP Packet
    * `:retries` - The number times to retry to send the command (default 2)
  """
  @behaviour Grizzly.Command

  alias Grizzly.Packet
  alias Grizzly.Command.{EncodeError, Encoding}
  alias Grizzly.CommandClass.TimeParameters

  @type t :: %__MODULE__{
          seq_number: Grizzly.seq_number(),
          retries: non_neg_integer(),
          value: TimeParameters.date_time()
        }

  @type opt ::
          {:seq_number, Grizzly.seq_number()}
          | {:retries, non_neg_integer()}
          | {:value, TimeParameters.date_time()}

  defstruct seq_number: nil, retries: 2, value: nil

  @doc "Builds the command struct from the given options."
  @spec init([opt]) :: {:ok, t}
  def init(opts) do
    {:ok, struct(__MODULE__, opts)}
  end

  @doc """
  Encodes the command into a Z/IP packet binary, validating each
  date/time field against its legal range first.
  """
  @spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
  def encode(
        %__MODULE__{
          value: %{
            year: year,
            month: month,
            day: day,
            hour: hour,
            minute: minute,
            second: second
          },
          seq_number: seq_number
        } = command
      ) do
    with {:ok, _encoded} <-
           Encoding.encode_and_validate_args(
             command,
             %{
               year: {:bytes, 2},
               month: {:range, 1, 12},
               day: {:range, 1, 31},
               # Hours run 00..23 (see moduledoc); the upper bound was
               # previously 24, which accepted an invalid hour.
               hour: {:range, 0, 23},
               minute: {:range, 0, 59},
               second: {:range, 0, 59}
             },
             [:value]
           ) do
      # 0x8B 0x01 = TIME_PARAMETERS SET, followed by the big-endian
      # year and one byte per remaining field.
      binary =
        Packet.header(seq_number) <>
          <<
            0x8B,
            0x01,
            year::size(16),
            month::size(8),
            day::size(8),
            hour::size(8),
            minute::size(8),
            second::size(8)
          >>

      {:ok, binary}
    end
  end

  @doc """
  Handles Z/IP packet responses for this command:

    * ack - the set was accepted, done with `:ok`
    * nack - retry until the retry budget is exhausted
    * time parameters report - done with the reported value
    * anything else - keep waiting
  """
  @spec handle_response(t, Packet.t()) ::
          {:continue, t}
          | {:done, :ok}
          | {:done, {:error, :nack_response}}
          | {:done, {:ok, TimeParameters.date_time()}}
          | {:retry, t}
  def handle_response(
        %__MODULE__{seq_number: seq_number} = _command,
        %Packet{
          seq_number: seq_number,
          types: [:ack_response]
        }
      ) do
    {:done, :ok}
  end

  def handle_response(
        %__MODULE__{seq_number: seq_number, retries: 0},
        %Packet{
          seq_number: seq_number,
          types: [:nack_response]
        }
      ) do
    {:done, {:error, :nack_response}}
  end

  def handle_response(
        %__MODULE__{seq_number: seq_number, retries: n} = command,
        %Packet{
          seq_number: seq_number,
          types: [:nack_response]
        }
      ) do
    {:retry, %{command | retries: n - 1}}
  end

  def handle_response(
        _,
        %Packet{
          body: %{
            command_class: :time_parameters,
            command: :report,
            value: value
          }
        }
      ) do
    {:done, {:ok, value}}
  end

  def handle_response(command, _), do: {:continue, command}
end
|
lib/grizzly/command_class/time_parameters/set.ex
| 0.888463
| 0.595787
|
set.ex
|
starcoder
|
defmodule TryNx do
  @moduledoc """
  Documentation for `TryNx`.
  """

  require Logger
  import Nx.Defn

  # Hard stop: never train for more than this many epochs.
  @max_epoch 10000
  # Early stop: stop as soon as the loss drops to this value or below.
  @max_loss 0.5

  @doc """
  Train model using MSE and SGD
  Reference: https://jovian.ai/aakashns/02-linear-regression (pytorch version)
  ## Examples
      iex> TryNx.now()
      :ok
  """
  def now do
    # Training inputs: 5 samples, 3 features each.
    inputs =
      Nx.tensor(
        [
          [73, 67, 43],
          [91, 88, 64],
          [87, 134, 58],
          [102, 43, 37],
          [69, 96, 70]
        ],
        type: {:f, 32}
      )

    # Expected outputs: 5 samples, 2 target values each.
    targets =
      Nx.tensor(
        [
          [56, 70],
          [81, 101],
          [119, 133],
          [22, 37],
          [103, 119]
        ],
        type: {:f, 32}
      )

    # Random initial parameters for a linear model: y = x * W^T + b.
    weights = Nx.random_uniform({2, 3})
    biases = Nx.random_uniform({2})

    {preds, weights, biases, epoch, loss} = train(inputs, targets, weights, biases)

    # Bug fix: the weights/biases labels were swapped in this report, and the
    # targets line carried a stray trailing "}".
    Logger.info("""
    Training result:
    epoch: #{inspect(epoch)}
    loss: #{inspect(loss)}
    preds: #{inspect(preds)}
    targets: #{inspect(targets)}
    weights: #{inspect(weights)}
    biases: #{inspect(biases)}
    """)

    :ok
  end

  @doc """
  Runs gradient-descent steps until `@max_epoch` epochs have run or the loss
  has dropped to `@max_loss` or below, then returns
  `{predictions, weights, biases, epoch, loss}`.
  """
  def train(inputs, targets, weights, biases, epoch \\ 0, loss \\ :infinity)

  # Keep training while under the epoch budget AND the loss is still above
  # the early-stopping threshold. The initial `:infinity` atom compares
  # greater than any number under Erlang term ordering, so the first
  # iteration always runs.
  #
  # Bug fix: the guard was `epoch < @max_epoch or loss < @max_loss`, which
  # kept training forever once the loss dropped below the threshold.
  def train(inputs, targets, weights, biases, epoch, loss)
      when epoch < @max_epoch and loss > @max_loss do
    Logger.info("""
    Training step:
    epoch: #{inspect(epoch)}
    loss: #{inspect(loss)}
    weights: #{inspect(weights)}
    biases: #{inspect(biases)}
    """)

    {loss, {weight_grads, bias_grads}} = eval(inputs, targets, weights, biases)
    loss = Nx.to_scalar(loss)
    weights = sgd(weights, weight_grads)
    biases = sgd(biases, bias_grads)
    train(inputs, targets, weights, biases, epoch + 1, loss)
  end

  # Stopping clause: build final predictions and return everything.
  def train(inputs, _targets, weights, biases, epoch, loss),
    do: {model(inputs, weights, biases), weights, biases, epoch, loss}

  # Loss plus parameter gradients for the current parameters.
  defn eval(inputs, targets, weights, biases) do
    loss =
      model(inputs, weights, biases)
      |> mse(targets)

    {loss, grad({weights, biases}, loss)}
  end

  # Linear model: inputs * weights^T + biases.
  defn model(inputs, weights, biases) do
    inputs
    |> Nx.dot(Nx.transpose(weights))
    |> Nx.add(biases)
  end

  # Mean squared error between predictions and targets.
  defn mse(left, right) do
    left
    |> Nx.subtract(right)
    |> Nx.power(2)
    |> Nx.mean()
  end

  # One SGD update: parameter - step * gradient.
  defn sgd(tensor, grads, step \\ 1.0e-5) do
    tensor
    |> Nx.subtract(Nx.multiply(grads, step))
  end
end
|
lib/try_nx.ex
| 0.937854
| 0.701662
|
try_nx.ex
|
starcoder
|
defmodule Helios.Router.InvalidSpecError do
@moduledoc """
Raised when a route specification string is malformed, e.g. a `*glob`
segment followed by more segments or an invalid `:identifier` name.
"""
defexception message: "invalid route specification"
end
defmodule Helios.Router.Utils do
@moduledoc false
@doc """
Generates a representation that will only match routes
according to the given `spec`.
The spec must be a binary; the only clause is guarded by `is_binary/1`,
so non-binary specs raise `FunctionClauseError`.
## Examples
iex> Helios.Router.Utils.build_path_match("/foo/:id")
{[:id], ["foo", {:id, [], nil}]}
"""
def build_path_match(spec, context \\ nil) when is_binary(spec) do
build_path_match(split(spec), context, [], [])
end
@doc """
Builds a list of path param names and var match pairs that can bind
to dynamic path segment values. Excludes params with underscores;
otherwise, the compiler will warn about used underscored variables
when they are unquoted in the macro.
## Examples
iex> Helios.Router.Utils.build_path_params_match([:id])
[{"id", {:id, [], nil}}]
"""
def build_path_params_match(vars) do
vars
|> Enum.map(fn v -> {Atom.to_string(v), Macro.var(v, nil)} end)
|> Enum.reject(fn v -> match?({"_" <> _var, _macro}, v) end)
end
@doc """
Splits the given path into several segments.
It ignores both leading and trailing slashes in the path.
## Examples
iex> Helios.Router.Utils.split("/foo/bar")
["foo", "bar"]
iex> Helios.Router.Utils.split("/:id/*")
[":id", "*"]
iex> Helios.Router.Utils.split("/foo//*_bar")
["foo", "*_bar"]
"""
def split(bin) when is_binary(bin) do
for segment when segment != "" <- String.split(bin, "/"), do: segment
end
# Recursively classify each segment and accumulate the identifier vars
# and the quoted match expressions.
defp build_path_match([h | t], context, vars, acc) do
handle_segment_match(segment_match(h, "", context), t, context, vars, acc)
end
# All segments consumed: return unique vars and the match list, both in
# original order (they were accumulated reversed).
defp build_path_match([], _context, vars, acc) do
{vars |> Enum.uniq() |> Enum.reverse(), Enum.reverse(acc)}
end
# Handle each segment match. They can either be a
# :literal ("foo"), an :identifier (":bar") or a :glob ("*path")
defp handle_segment_match({:literal, literal}, t, context, vars, acc) do
build_path_match(t, context, vars, [literal | acc])
end
defp handle_segment_match({:identifier, identifier, expr}, t, context, vars, acc) do
build_path_match(t, context, [identifier | vars], [expr | acc])
end
# A glob must be the last segment — anything after it is unreachable.
defp handle_segment_match({:glob, _identifier, _expr}, t, _context, _vars, _acc) when t != [] do
raise Helios.Router.InvalidSpecError, message: "cannot have a *glob followed by other segments"
end
# Glob with preceding segments: fold the glob into the previous segment
# via a quoted cons (`prev | glob`) so it matches the rest of the path.
defp handle_segment_match({:glob, identifier, expr}, _t, context, vars, [hs | ts]) do
acc = [{:|, [], [hs, expr]} | ts]
build_path_match([], context, [identifier | vars], acc)
end
# Glob as the only segment: the glob expression IS the whole match.
defp handle_segment_match({:glob, identifier, expr}, _t, context, vars, _) do
{vars, expr} = build_path_match([], context, [identifier | vars], [expr])
{vars, hd(expr)}
end
# In a given segment, checks if there is a match.
defp segment_match(":" <> argument, buffer, context) do
identifier = binary_to_identifier(":", argument)
expr =
quote_if_buffer(identifier, buffer, context, fn var ->
quote do: unquote(buffer) <> unquote(var)
end)
{:identifier, identifier, expr}
end
defp segment_match("*" <> argument, buffer, context) do
underscore = {:_, [], context}
identifier = binary_to_identifier("*", argument)
expr =
quote_if_buffer(identifier, buffer, context, fn var ->
quote do: [unquote(buffer) <> unquote(underscore) | unquote(underscore)] = unquote(var)
end)
{:glob, identifier, expr}
end
# Plain character: accumulate it into the literal-prefix buffer.
defp segment_match(<<h, t::binary>>, buffer, context) do
segment_match(t, buffer <> <<h>>, context)
end
defp segment_match(<<>>, buffer, _context) do
{:literal, buffer}
end
# No literal prefix before the identifier: match the bare variable.
defp quote_if_buffer(identifier, "", context, _fun) do
{identifier, [], context}
end
# Literal prefix present (e.g. "foo-:id"): wrap the var with the caller's
# quoted prefix-matching expression.
defp quote_if_buffer(identifier, _buffer, context, fun) do
fun.({identifier, [], context})
end
defp binary_to_identifier(prefix, <<letter, _::binary>> = binary)
when letter in ?a..?z or letter == ?_ do
if binary =~ ~r/^\w+$/ do
String.to_atom(binary)
else
raise Helios.Router.InvalidSpecError,
message: "#{prefix}identifier in routes must be made of letters, numbers and underscores"
end
end
defp binary_to_identifier(prefix, _) do
raise Helios.Router.InvalidSpecError,
message: "#{prefix} in routes must be followed by lowercase letters or underscore"
end
end
|
lib/helios/router/utils.ex
| 0.862033
| 0.417301
|
utils.ex
|
starcoder
|
defmodule Surface.Catalogue.Example do
@moduledoc """
Experimental LiveView to create examples for catalogue tools.
## Options
Besides the buit-in options provided by the LiveView itself, an Example also
provides the following options:
* `subject` - Required. The target component of the Example.
* `height` - Required. The height of the Example.
* `catalogue` - Optional. A module that implements the `Surface.Catalogue`
providing additional information to the catalogue tool. Usually required
if you want to share your components as a library.
* `body` - Optional. Sets/overrides the attributes of the the Example's body tag.
Useful to set a different background or padding.
* `title` - Optional. The title of the example.
* `direction` - Optional. Defines how the example + code boxes should be displayed.
Available values are "horizontal" or "vertical". Default is "horizontal" (side-by-side).
* `code_perc` - Optional. When the direction is "horizontal", defines the percentage of
the total width that the code box should take. Default is `50`. Note: This configuration
has no effect when direction is "vertical".
"""
defmacro __using__(opts) do
# Resolve and validate the required `:subject` option at compile time.
subject = Surface.Catalogue.fetch_subject!(opts, __MODULE__, __CALLER__)
quote do
use Surface.LiveView, unquote(opts)
alias unquote(subject)
require Surface.Catalogue.Data, as: Data
@config unquote(opts)
@before_compile unquote(__MODULE__)
# Shadow ~F with a macro that stores the raw template source into the
# @code attribute before delegating to the real sigil, so the catalogue
# can display the example's source alongside its rendered output.
import Surface, except: [sigil_F: 2]
defmacrop sigil_F({:<<>>, _meta, [string]} = ast, opts) do
Module.put_attribute(__CALLER__.module, :code, string)
quote do
Surface.sigil_F(unquote(ast), unquote(opts))
end
end
end
end
# Attach catalogue metadata (type, subject, options, and the template
# source captured by the ~F override above) to @moduledoc so catalogue
# tools can discover this example at runtime.
defmacro __before_compile__(env) do
config = Module.get_attribute(env.module, :config)
subject = Keyword.fetch!(config, :subject)
code = Module.get_attribute(env.module, :code)
quote do
@moduledoc catalogue: [
type: :example,
subject: unquote(subject),
config: unquote(config),
code: unquote(code)
]
end
end
end
|
lib/surface/catalogue/example.ex
| 0.909561
| 0.793986
|
example.ex
|
starcoder
|
defmodule SFTPToolkit.Download do
@moduledoc """
Module containing functions that ease downloading data from the SFTP server.
"""
use Bunch
@default_operation_timeout 5000
@default_chunk_size 32768
@default_remote_mode [:read, :binary]
@default_local_mode [:write, :binary]
@doc """
Downloads a single file by reading it in chunks to avoid loading whole
file into memory as `:ssh_sftp.read_file/3` does by default.
## Arguments
Expects the following arguments:
* `sftp_channel_pid` - PID of the already opened SFTP channel,
* `remote_path` - remote path to the file on the SFTP server,
* `local_path` - local path to the file,
* `options` - additional options, see below.
## Options
* `operation_timeout` - SFTP operation timeout (it is a timeout
per each SFTP operation, not total timeout), defaults to 5000 ms,
* `chunk_size` - chunk size in bytes, defaults to 32KB,
* `remote_mode` - mode used while opening the remote file, defaults
to `[:read, :binary]`, see `:ssh_sftp.open/3` for possible
values,
* `local_mode` - mode used while opening the local file, defaults
to `[:write, :binary]`, see `File.open/2` for possible
values.
## Return values
On success returns `:ok`.
On error returns `{:error, reason}`, where `reason` might be one
of the following:
* `{:local_open, info}` - the `File.open/2` on the local file failed,
* `{:remote_open, info}` - the `:ssh_sftp.open/4` on the remote file
failed,
* `{:download, {:read, info}}` - the `:ssh_sftp.read/4` on the remote
file failed,
* `{:download, {:write, info}}` - the `IO.binwrite/2` on the local
file failed,
* `{:local_close, info}` - the `File.close/1` on the local file failed,
* `{:remote_close, info}` - the `:ssh_sftp.close/2` on the remote file
failed.
"""
@spec download_file(pid, Path.t(), Path.t(),
operation_timeout: timeout,
chunk_size: pos_integer,
remote_mode: [:read | :write | :creat | :trunc | :append | :binary],
local_mode: [File.mode()]
) :: :ok | {:error, any}
def download_file(sftp_channel_pid, remote_path, local_path, options \\ []) do
chunk_size = Keyword.get(options, :chunk_size, @default_chunk_size)
operation_timeout = Keyword.get(options, :operation_timeout, @default_operation_timeout)
remote_mode = Keyword.get(options, :remote_mode, @default_remote_mode)
local_mode = Keyword.get(options, :local_mode, @default_local_mode)
# Bunch's `withl` labels each step; a failing step jumps to the matching
# labelled clause in `else`, which tags the error with its origin.
withl remote_open:
{:ok, remote_handle} <-
:ssh_sftp.open(sftp_channel_pid, remote_path, remote_mode, operation_timeout),
local_open: {:ok, local_handle} <- File.open(local_path, local_mode),
download:
:ok <-
do_download_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
),
remote_close:
:ok <- :ssh_sftp.close(sftp_channel_pid, remote_handle, operation_timeout),
local_close: :ok <- File.close(local_handle) do
:ok
else
local_open: {:error, reason} -> {:error, {:local_open, reason}}
remote_open: {:error, reason} -> {:error, {:remote_open, reason}}
download: {:error, reason} -> {:error, {:download, reason}}
remote_close: {:error, reason} -> {:error, {:remote_close, reason}}
local_close: {:error, reason} -> {:error, {:local_close, reason}}
end
end
# Streams the remote file to the local handle one chunk at a time,
# recursing until `:eof`. Errors are tagged `{:read, _}` when the remote
# read fails and `{:write, _}` when the local write fails.
defp do_download_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
) do
case :ssh_sftp.read(sftp_channel_pid, remote_handle, chunk_size, operation_timeout) do
:eof ->
:ok
{:error, reason} ->
{:error, {:read, reason}}
{:ok, data} ->
case IO.binwrite(local_handle, data) do
:ok ->
do_download_file(
sftp_channel_pid,
local_handle,
remote_handle,
chunk_size,
operation_timeout
)
{:error, reason} ->
{:error, {:write, reason}}
end
end
end
end
|
lib/sftp_toolkit/download.ex
| 0.7641
| 0.542984
|
download.ex
|
starcoder
|
defmodule Day11 do
  @moduledoc """
  Advent of Code 2019
  Day 11: Space Police
  """

  alias Day11.{Part1, Part2}

  # Loads the puzzle input next to this source file and parses the
  # comma-separated Intcode program into a list of integers.
  def get_program() do
    raw = File.read!(Path.join(__DIR__, "inputs/day11.txt"))

    raw
    |> String.trim()
    |> String.split(",")
    |> Enum.map(&String.to_integer/1)
  end

  # Runs both parts against the same program and prints the answers.
  def execute() do
    code = get_program()
    IO.puts("Part 1: #{Part1.run(code)}")
    IO.puts("Part 2:\n#{Part2.run(code)}")
  end
end
defmodule Day11.Part1 do
# Counts the number of panels painted at least once by the robot.
def run(program) do
GenServer.start_link(Intcode, program, name: Computer11)
panels = run_painting_robot()
GenServer.stop(Computer11)
map_size(panels)
end
# black = 0, white = 1
# Rotation tables: one clockwise step and one counter-clockwise step.
@cw_rotations %{:up => :right, :right => :down, :down => :left, :left => :up}
@ccw_rotations %{:up => :left, :left => :down, :down => :right, :right => :up}
# Drives the robot: feed the current panel color to the Intcode computer,
# paint the panel with the first output, turn per the second output, step
# forward one panel, and repeat until the program halts. Returns the map
# of painted panel coordinates to colors.
#
# NOTE(review): AoC 2019 day 11 specifies output 0 = turn left and
# 1 = turn right; here 0 maps to a clockwise (right) turn — verify this
# against the Intcode implementation's output convention.
def run_painting_robot(panels \\ %{}, {x, y} = coords \\ {0, 0}, direction \\ :up) do
input = Map.get(panels, coords, 0)
case GenServer.call(Computer11, {:run, [input]}) do
{:exit, _} ->
panels
{:output, color} ->
panels = Map.put(panels, coords, color)
direction =
case GenServer.call(Computer11, {:run, []}) do
{_, 0} -> @cw_rotations[direction]
{_, 1} -> @ccw_rotations[direction]
end
coords =
case direction do
:up -> {x, y + 1}
:right -> {x + 1, y}
:down -> {x, y - 1}
:left -> {x - 1, y}
end
run_painting_robot(panels, coords, direction)
end
end
end
defmodule Day11.Part2 do
  alias Day11.Part1

  # Paints the registration identifier: run the robot starting on a white
  # panel, then render the painted panels as text.
  def run(program) do
    GenServer.start_link(Intcode, program, name: Computer11)
    panels = Part1.run_painting_robot(%{{0, 0} => 1})
    GenServer.stop(Computer11)
    format_panels(panels)
  end

  # Renders the panel map as lines of text; each panel is two characters
  # wide so the output is roughly square in a terminal. Rows and columns
  # are traversed from max to min, matching the original orientation.
  def format_panels(panels) do
    %{x_min: x_min, x_max: x_max, y_min: y_min, y_max: y_max} = get_dimensions(panels)

    Enum.map_join(y_max..y_min, "\n", fn y ->
      Enum.map_join(x_max..x_min, "", fn x ->
        case Map.get(panels, {x, y}, 0) do
          0 -> "  "
          1 -> "\u2591\u2591"
        end
      end)
    end)
  end

  # Bounding box of all painted panel coordinates.
  defp get_dimensions(panels) do
    coords = Map.keys(panels)
    {x_min, x_max} = coords |> Enum.map(&elem(&1, 0)) |> Enum.min_max()
    {y_min, y_max} = coords |> Enum.map(&elem(&1, 1)) |> Enum.min_max()
    %{x_min: x_min, x_max: x_max, y_min: y_min, y_max: y_max}
  end
end
|
lib/day11.ex
| 0.636918
| 0.458591
|
day11.ex
|
starcoder
|
defmodule BroadwayCloudPubSub.Producer do
  @moduledoc """
  A GenStage producer that continuously receives messages from a Google Cloud Pub/Sub
  topic and acknowledges them after being successfully processed.
  By default this producer uses `BroadwayCloudPubSub.GoogleApiClient` to talk to Cloud
  Pub/Sub, but you can provide your client by implementing the `BroadwayCloudPubSub.Client`
  behaviour.
  ## Options using `BroadwayCloudPubSub.GoogleApiClient`
  * `:subscription` - Required. The name of the subscription.
  Example: "projects/my-project/subscriptions/my-subscription"
  * `:max_number_of_messages` - Optional. The maximum number of messages to be fetched
  per request. Default is `10`.
  * `:return_immediately` - Optional. If this field set to true, the system will respond immediately
  even if it there are no messages available to return in the Pull response. Otherwise, the system
  may wait (for a bounded amount of time) until at least one message is available, rather than
  returning no messages. Default is `nil`.
  * `:scope` - Optional. A string representing the scope or scopes to use when fetching
  an access token. Default is `"https://www.googleapis.com/auth/pubsub"`.
  Note: The `:scope` option only applies to the default token generator.
  * `:token_generator` - Optional. An MFArgs tuple that will be called before each request
  to fetch an authentication token. It should return `{:ok, String.t()} | {:error, any()}`.
  Default generator uses `Goth.Token.for_scope/1` with `"https://www.googleapis.com/auth/pubsub"`.
  ## Additional options
  These options applies to all producers, regardless of client implementation:
  * `:client` - Optional. A module that implements the `BroadwayCloudPubSub.Client`
  behaviour. This module is responsible for fetching and acknowledging the
  messages. Pay attention that all options passed to the producer will be forwarded
  to the client. It's up to the client to normalize the options it needs. Default
  is `BroadwayCloudPubSub.GoogleApiClient`.
  * `:receive_interval` - Optional. The duration (in milliseconds) for which the producer
  waits before making a request for more messages. Default is 5000.
  ### Example
  Broadway.start_link(MyBroadway,
  name: MyBroadway,
  producers: [
  default: [
  module: {BroadwayCloudPubSub.Producer,
  subscription: "projects/my-project/subscriptions/my_subscription"
  }
  ]
  ]
  )
  The above configuration will set up a producer that continuously receives messages
  from `"projects/my-project/subscriptions/my_subscription"` and sends them downstream.
  """
  use GenStage

  @default_receive_interval 5000

  @impl true
  def init(opts) do
    client = opts[:client] || BroadwayCloudPubSub.GoogleApiClient
    interval = opts[:receive_interval] || @default_receive_interval

    # Let the client validate/normalize its own options; a bad config is a
    # startup error, so raise instead of returning an error tuple.
    case client.init(opts) do
      {:ok, client_opts} ->
        state = %{
          demand: 0,
          receive_timer: nil,
          receive_interval: interval,
          client: {client, client_opts}
        }

        {:producer, state}

      {:error, message} ->
        raise ArgumentError, "invalid options given to #{inspect(client)}.init/1, " <> message
    end
  end

  @impl true
  def handle_demand(added_demand, state) do
    handle_receive_messages(%{state | demand: state.demand + added_demand})
  end

  @impl true
  def handle_info(:receive_messages, state) do
    handle_receive_messages(%{state | receive_timer: nil})
  end

  @impl true
  def handle_info(_, state) do
    {:noreply, [], state}
  end

  # Fetch messages only when there is outstanding demand and no poll is
  # already scheduled; otherwise emit nothing.
  defp handle_receive_messages(%{receive_timer: nil, demand: demand} = state) when demand > 0 do
    fetched = receive_messages_from_pubsub(state, demand)
    remaining = demand - length(fetched)

    # Empty fetch -> back off for the configured interval; fully satisfied
    # demand -> wait for more demand; partially satisfied -> poll again now.
    timer =
      cond do
        fetched == [] -> schedule_receive_messages(state.receive_interval)
        remaining == 0 -> nil
        true -> schedule_receive_messages(0)
      end

    {:noreply, fetched, %{state | demand: remaining, receive_timer: timer}}
  end

  defp handle_receive_messages(state), do: {:noreply, [], state}

  defp receive_messages_from_pubsub(%{client: {client, opts}}, total_demand) do
    client.receive_messages(total_demand, opts)
  end

  defp schedule_receive_messages(interval) do
    Process.send_after(self(), :receive_messages, interval)
  end
end
|
lib/broadway_cloud_pub_sub/producer.ex
| 0.922032
| 0.412619
|
producer.ex
|
starcoder
|
defmodule Militerm.Services.MML do
  @moduledoc """
  Manages the rendering handlers for MML tags for different device contexts.
  """

  # Bug fix: `use GenServer` was missing. The module is started via
  # `GenServer.start_link(__MODULE__, ...)` and marks callbacks with
  # `@impl true`, so it must declare the behaviour (this also provides
  # default implementations for callbacks not defined here).
  use GenServer

  @doc """
  Starts the service registered under the module name.
  """
  def start_link() do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  # Bug fix: the argument was named `opts` but never used, which produced
  # an "unused variable" compiler warning.
  def child_spec(_opts \\ []) do
    %{
      id: {:global, __MODULE__},
      start: {__MODULE__, :start_link, []},
      type: :worker,
      restart: :permanent,
      shutdown: 500
    }
  end

  @doc """
  Registers a set of tag handlers for a given device context.
  ## Examples
  Given functions in the `Example.Telnet.Description` module with the
  signatures `room(attributes, children, bindings, device)` and
  `env(attributes, children, bindings, device, sense)`:
  iex> MML.register_tags(Example.Telnet.Description, :telnet, [:room, {"env:sight", {:env, :sight}}])
  :ok
  """
  def register_tags(module, device, tag_map) do
    GenServer.call(__MODULE__, {:register_tags, module, device, tag_map})
  end

  @doc """
  Looks up the `{module, function, args}` handler registered for `tag` on
  `device`, falling back to a handler registered for the `:any` device.
  Returns `{:ok, mfa}` or `:error`.
  """
  def tag_handler(tag, device) do
    case :ets.lookup(__MODULE__, {tag, device}) do
      [{_, mfa}] ->
        {:ok, mfa}

      _ ->
        case :ets.lookup(__MODULE__, {tag, :any}) do
          [{_, mfa}] -> {:ok, mfa}
          _ -> :error
        end
    end
  end

  @doc """
  Renders a parsed MML document (`{parse, bindings}`) for the given device.
  """
  def render({parse, bindings}, device) do
    parse
    |> Enum.map(fn item -> render_item(item, bindings, device) end)
  end

  # Plain strings render as themselves.
  def render_item(string, _, _) when is_binary(string), do: string

  # Tags dispatch to the registered handler; if no handler is registered
  # for the tag/device, the tag's children are rendered in its place.
  def render_item({:tag, attributes, nodes}, bindings, device) do
    with {:ok, name} <- Keyword.fetch(attributes, :name),
         {:ok, {module, function, args}} <- tag_handler(name, device) do
      apply(module, function, [attributes, nodes, bindings, device] ++ args)
    else
      _ ->
        render({nodes, bindings}, device)
    end
  end

  @impl true
  def init(_) do
    # The handler table is owned by this process; lookups go straight to
    # ETS so readers never block on the GenServer.
    :ets.new(__MODULE__, [:named_table])
    {:ok, nil}
  end

  # Bug fix: `@impl true` was missing on this callback.
  @impl true
  def handle_call({:register_tags, module, device, tag_map}, _from, state) do
    {:reply, insert_tag_handlers(module, device, tag_map), state}
  end

  # Insert handler entries one at a time; each entry may be a bare tag
  # (handler function named after the tag), `{tag, function}`, or
  # `{tag, {function, extra_args}}`.
  def insert_tag_handlers(_, _, []), do: :ok

  def insert_tag_handlers(module, device, map) when is_map(map) do
    insert_tag_handlers(module, device, Map.to_list(map))
  end

  def insert_tag_handlers(module, device, [{tag, {fctn, args}} | rest]) do
    :ets.insert(__MODULE__, {{to_string(tag), device}, {module, fctn, to_list(args)}})
    insert_tag_handlers(module, device, rest)
  end

  def insert_tag_handlers(module, device, [{tag, fctn} | rest]) do
    :ets.insert(__MODULE__, {{to_string(tag), device}, {module, fctn, []}})
    insert_tag_handlers(module, device, rest)
  end

  def insert_tag_handlers(module, device, [tag | rest]) when is_atom(tag) do
    :ets.insert(__MODULE__, {{to_string(tag), device}, {module, tag, []}})
    insert_tag_handlers(module, device, rest)
  end

  def insert_tag_handlers(module, device, [tag | rest]) when is_binary(tag) do
    # Uses to_existing_atom so arbitrary tag strings cannot exhaust the
    # atom table; the handler function must already exist as an atom.
    :ets.insert(__MODULE__, {{tag, device}, {module, String.to_existing_atom(tag), []}})
    insert_tag_handlers(module, device, rest)
  end

  # Normalizes handler args to a list.
  def to_list(list) when is_list(list), do: list
  def to_list(not_list), do: [not_list]
end
|
lib/militerm/services/mml.ex
| 0.761893
| 0.419826
|
mml.ex
|
starcoder
|
defmodule ExKpl do
@moduledoc """
Elixir implementation of the Kinesis Producer Library record aggregator.
This is a port of the Erlang implementation included in [adroll/erlmld](https://github.com/AdRoll/erlmld)
## Basic usage:
iex> {_, aggregator} = ExKpl.add(ExKpl.new(), {"partition_key", "data"})
...> ExKpl.finish(aggregator)
{{"partition_key", <<243, 137, 154, 194, 10, 13, 112, 97, 114, 116, 105, 116, 105, 111, 110, 95, 107, 101, 121, 26, 8, 8, 0, 26, 4, 100, 97, 116, 97, 208, 54, 153, 218, 90, 34, 47, 163, 33, 8, 173, 27, 217, 85, 161, 78>>, nil}, %ExKpl{agg_explicit_hash_key: nil, agg_partition_key: nil, agg_size_bytes: 0, explicit_hash_keyset: %ExKpl.Keyset{key_to_index: %{}, rev_keys: []}, num_user_records: 0, partition_keyset: %ExKpl.Keyset{key_to_index: %{}, rev_keys: []}, rev_records: []}}
Typically you will use it like:
case ExKpl.add(aggregator, {partition_key, data}) do
{nil, aggregator} ->
aggregator
{full_record, aggregator} ->
send_record_to_kinesis(full_record)
aggregator
end
You can force the current records to be aggregated with `finish/1,2`
"""
use Bitwise
alias ExKpl.{Proto, Keyset}
require Logger
@type key :: binary() | nil
@type raw_data :: binary()
@type serialized_data :: binary()
@type user_record :: {key(), raw_data(), key()}
@type aggregated_record :: {key(), serialized_data(), key()}
@type new_opts :: [{:max_bytes_per_record, pos_integer()}]
# 4-byte magic prefix identifying a KPL aggregated record.
@magic <<243, 137, 154, 194>>
# Variant magic used when the payload is zlib-compressed.
@magic_deflated <<244, 137, 154, 194>>
# 1 MiB: the Kinesis per-record size limit.
@max_bytes_per_record bsl(1, 20)
@md5_digest_bytes 16
# Size of an empty protobuf AggregatedRecord, used as the fixed overhead
# in size accounting (computed once at compile time).
@empty_record_size %Proto.AggregatedRecord{}
|> Protox.Encode.encode!()
|> IO.iodata_to_binary()
|> byte_size()
defstruct num_user_records: 0,
agg_size_bytes: 0,
agg_partition_key: nil,
agg_explicit_hash_key: nil,
partition_keyset: %Keyset{},
explicit_hash_keyset: %Keyset{},
rev_records: [],
max_bytes_per_record: @max_bytes_per_record
@type t :: %__MODULE__{
num_user_records: non_neg_integer(),
agg_size_bytes: non_neg_integer(),
agg_partition_key: key(),
agg_explicit_hash_key: key(),
partition_keyset: Keyset.t(),
explicit_hash_keyset: Keyset.t(),
rev_records: [binary()],
max_bytes_per_record: pos_integer()
}
@doc """
Creates a new, empty aggregator. `:max_bytes_per_record` is capped at the
Kinesis limit of 1 MiB.
"""
@spec new(new_opts()) :: t()
def new(opts \\ []) do
max = Keyword.get(opts, :max_bytes_per_record, @max_bytes_per_record)
%__MODULE__{
max_bytes_per_record: min(max, @max_bytes_per_record)
}
end
@doc """
Returns the number of user records currently buffered.
"""
@spec count(t()) :: non_neg_integer()
def count(%__MODULE__{num_user_records: count}), do: count
@doc """
Returns the estimated serialized size of the aggregate in bytes.
"""
@spec size_bytes(t()) :: non_neg_integer()
def size_bytes(%__MODULE__{agg_size_bytes: size, agg_partition_key: pk}) do
byte_size(@magic) + size + pk_size(pk) + @md5_digest_bytes + @empty_record_size
end
@doc """
Serializes the buffered records into one aggregated record, returning
`{aggregated_record | nil, empty_aggregator}`. The two-argument form can
deflate-compress the payload.
"""
@spec finish(t()) :: {aggregated_record() | nil, t()}
def finish(%__MODULE__{num_user_records: 0} = agg, _), do: {nil, agg}
def finish(
%__MODULE__{agg_partition_key: agg_pk, agg_explicit_hash_key: agg_ehk} = agg,
should_deflate?
) do
# The aggregate inherits the partition/explicit-hash key of its first
# user record (set by add_record via first_defined/2).
agg_record = {agg_pk, serialize_data(agg, should_deflate?), agg_ehk}
{agg_record, new(max_bytes_per_record: agg.max_bytes_per_record)}
end
def finish(agg), do: finish(agg, false)
@doc """
Adds a user record. Returns `{nil, agg}` while the aggregate still has
room, or `{full_aggregated_record, agg_with_new_record}` when adding the
record forced the previous aggregate to be flushed.
"""
@spec add(t(), {key(), binary()} | {key(), binary(), key()}) ::
{aggregated_record() | nil, t()}
def add(agg, {partition_key, data}) do
add(agg, {partition_key, data, create_explicit_hash_key(partition_key)})
end
def add(%{max_bytes_per_record: max} = agg, {partition_key, data, explicit_hash_key}) do
case {calc_record_size(agg, partition_key, data, explicit_hash_key), size_bytes(agg)} do
# Record can never fit on its own: drop it (logged), keep the aggregate.
{rec_size, _} when rec_size > max ->
Logger.error(fn -> "input record too large to fit in a single Kinesis record" end)
{nil, agg}
# Record does not fit in the current aggregate: flush, then start a new
# aggregate containing this record.
{rec_size, cur_size} when rec_size + cur_size > max ->
{full_record, agg1} = finish(agg)
agg2 = add_record(agg1, partition_key, data, explicit_hash_key, rec_size)
{full_record, agg2}
{rec_size, _} ->
agg1 = add_record(agg, partition_key, data, explicit_hash_key, rec_size)
# FIXME make size calculations more accurate
# Safety margin: if the new size is within 64 bytes of the limit,
# flush the records buffered *before* this one and carry this record
# over into a fresh aggregate.
case size_bytes(agg1) > max - 64 do
true ->
{full_record, agg2} = finish(agg)
agg3 = add_record(agg2, partition_key, data, explicit_hash_key, rec_size)
{full_record, agg3}
false ->
{nil, agg1}
end
end
end
@doc """
Adds many user records, returning `{completed_aggregates, aggregator}`.
"""
@spec add_all(t(), [user_record()]) :: {[aggregated_record()], t()}
def add_all(agg, records) do
{rev_agg_records, new_agg} =
List.foldl(records, {[], agg}, fn record, {rev_agg_records, agg} ->
case add(agg, record) do
{nil, new_agg} -> {rev_agg_records, new_agg}
{agg_record, new_agg} -> {[agg_record | rev_agg_records], new_agg}
end
end)
{Enum.reverse(rev_agg_records), new_agg}
end
# Appends one record to the aggregate: interns the keys, prepends the
# protobuf record (records are kept reversed), and updates the counters.
defp add_record(
%__MODULE__{
partition_keyset: pkset,
explicit_hash_keyset: ehkset,
rev_records: rev_records,
num_user_records: num_user_records,
agg_size_bytes: agg_size,
agg_partition_key: agg_pk,
agg_explicit_hash_key: agg_ehk,
max_bytes_per_record: max_bytes_per_record
},
partition_key,
data,
explicit_hash_key,
new_record_size
) do
{pk_index, new_pk_set} = Keyset.get_or_add_key(partition_key, pkset)
{ehk_index, new_ehk_set} = Keyset.get_or_add_key(explicit_hash_key, ehkset)
new_record = %Proto.Record{
partition_key_index: pk_index,
explicit_hash_key_index: ehk_index,
data: data
}
%__MODULE__{
partition_keyset: new_pk_set,
explicit_hash_keyset: new_ehk_set,
rev_records: [new_record | rev_records],
num_user_records: 1 + num_user_records,
agg_size_bytes: new_record_size + agg_size,
agg_partition_key: first_defined(agg_pk, partition_key),
agg_explicit_hash_key: first_defined(agg_ehk, explicit_hash_key),
max_bytes_per_record: max_bytes_per_record
}
end
defp first_defined(nil, second), do: second
defp first_defined(first, _), do: first
# Estimates the serialized (protobuf) size contribution of a record:
# key-table entries are only counted when the key is not already interned.
defp calc_record_size(
%__MODULE__{partition_keyset: pkset, explicit_hash_keyset: ehkset},
partition_key,
data,
explicit_hash_key
) do
pk_length = byte_size(partition_key)
pk_size =
case Keyset.key?(partition_key, pkset) do
true -> 0
false -> 1 + varint_size(pk_length) + pk_length
end
ehk_size =
case explicit_hash_key do
nil ->
0
_ ->
ehk_length = byte_size(explicit_hash_key)
case Keyset.key?(explicit_hash_key, ehkset) do
true -> 0
false -> 1 + varint_size(ehk_length) + ehk_length
end
end
pk_index_size = 1 + varint_size(Keyset.potential_index(partition_key, pkset))
ehk_index_size =
case explicit_hash_key do
nil -> 0
_ -> 1 + varint_size(Keyset.potential_index(explicit_hash_key, ehkset))
end
data_length = byte_size(data)
data_size = 1 + varint_size(data_length) + data_length
inner_size = pk_index_size + ehk_index_size + data_size
pk_size + ehk_size + 1 + varint_size(inner_size) + inner_size
end
# Bytes a protobuf varint needs: one byte per started 7-bit group.
defp varint_size(int) when int >= 0 do
bits = max(num_bits(int, 0), 1)
div(bits + 6, 7)
end
defp num_bits(0, acc), do: acc
defp num_bits(int, acc) when int >= 0 do
num_bits(bsr(int, 1), acc + 1)
end
# Calculate a new explicit hash key based on the input partition key
# (following the algorithm from the original KPL).
# create_explicit_hash_key(_PartitionKey) ->
# Their python implementation [1] is broken compared to the C++
# implementation [2]. But we don't care about EHKs anyway.
# [1] https://github.com/awslabs/kinesis-aggregation/blob/db92620e435ad9924356cda7d096e3c888f0f72f/python/aws_kinesis_agg/aggregator.py#L447-L458
# [2] https://github.com/awslabs/amazon-kinesis-producer/blob/ea1e49218e1a11f1b462662a1db4cc06ddad39bb/aws/kinesis/core/user_record.cc#L36-L45
# FIXME: Implement the actual algorithm from KPL.
defp create_explicit_hash_key(_), do: nil
# Builds the final payload: magic prefix + protobuf body + MD5 checksum,
# optionally zlib-compressed (with the deflated magic).
defp serialize_data(
%__MODULE__{
partition_keyset: pkset,
explicit_hash_keyset: ehkset,
rev_records: records
},
should_deflate?
) do
serialized =
%Proto.AggregatedRecord{
partition_key_table: Keyset.key_list(pkset),
explicit_hash_key_table: Keyset.key_list(ehkset),
records: Enum.reverse(records)
}
|> Protox.Encode.encode!()
|> IO.iodata_to_binary()
data = serialized <> :crypto.hash(:md5, serialized)
case should_deflate? do
true ->
@magic_deflated <> :zlib.compress(data)
false ->
@magic <> data
end
end
defp pk_size(nil), do: 0
defp pk_size(pk), do: byte_size(pk)
end
|
lib/ex_kpl.ex
| 0.716913
| 0.505066
|
ex_kpl.ex
|
starcoder
|
defmodule ExWire.Packet.Capability.Par.SnapshotData do
@moduledoc """
Respond to a GetSnapshotData message with either an empty RLP list or a
1-item RLP list containing the raw chunk data requested.
```
`SnapshotData` [`0x14`, `chunk_data` or nothing]
```
"""
alias ExthCrypto.Hash.Keccak
alias ExWire.Packet.Capability.Par.SnapshotData.{BlockChunk, StateChunk}
require Logger
@behaviour ExWire.Packet
@type t :: %__MODULE__{
hash: EVM.hash(),
chunk: BlockChunk.t() | StateChunk.t() | nil
}
defstruct [:hash, :chunk]
@doc """
Returns the relative message id offset for this message.
This will help determine what its message ID is relative to other Packets in the same Capability.
"""
@impl true
@spec message_id_offset() :: 0x14
def message_id_offset, do: 0x14
@doc """
Given a SnapshotData packet, serializes for transport over Eth Wire Protocol.
## Examples
iex> %ExWire.Packet.Capability.Par.SnapshotData{
...> chunk: %ExWire.Packet.Capability.Par.SnapshotData.BlockChunk{
...> number: 5,
...> hash: <<6::256>>,
...> total_difficulty: 7,
...> block_data_list: []
...> }
...> }
...> |> ExWire.Packet.Capability.Par.SnapshotData.serialize()
[<<36, 12, 227, 5, 160, 0, 118, 1, 0, 4, 6, 7>>]
iex> %ExWire.Packet.Capability.Par.SnapshotData{
...> chunk: %ExWire.Packet.Capability.Par.SnapshotData.StateChunk{
...> account_entries: [
...> {
...> <<1::256>>,
...> %ExWire.Packet.Capability.Par.SnapshotData.StateChunk.RichAccount{
...> nonce: 2,
...> balance: 3,
...> code_flag: :has_code,
...> code: <<5::256>>,
...> storage: [{<<1::256>>, <<2::256>>}]
...> }
...> }
...> ]
...> }
...> }
...> |> ExWire.Packet.Capability.Par.SnapshotData.serialize()
[<<145, 1, 20, 248, 143, 248, 141, 160, 0, 118, 1, 0, 20, 1, 248, 106, 2, 3, 1, 126, 38, 0, 16, 5, 248, 68, 248, 66, 126, 37, 0, 130, 70, 0, 0, 2>>]
"""
@impl true
def serialize(%__MODULE__{chunk: chunk = %{__struct__: mod}}) do
# Dispatch serialization to the chunk's own module (BlockChunk or
# StateChunk), then RLP-encode and snappy-compress the result; the wire
# form is a one-element list holding the compressed chunk data.
{:ok, res} =
chunk
|> mod.serialize()
|> ExRLP.encode()
|> :snappyer.compress()
[res]
end
@doc """
Given an RLP-encoded SnapshotData packet from Eth Wire Protocol,
decodes into a SnapshotData struct.
## Examples
iex> [<<36, 12, 227, 5, 160, 0, 118, 1, 0, 4, 6, 7>>]
...> |> ExWire.Packet.Capability.Par.SnapshotData.deserialize()
%ExWire.Packet.Capability.Par.SnapshotData{
chunk: %ExWire.Packet.Capability.Par.SnapshotData.BlockChunk{
number: 5,
hash: <<6::256>>,
total_difficulty: 7,
block_data_list: []
},
hash: <<221, 170, 108, 39, 117, 113, 13, 3, 231, 40, 69, 49, 126, 6,
109, 164, 92, 237, 157, 243, 181, 196, 88, 128, 192, 177, 109,
36, 77, 236, 86, 196>>
}
iex> [<<145, 1, 20, 248, 143, 248, 141, 160, 0, 118, 1, 0, 20, 1, 248,
...> 106, 2, 3, 1, 126, 38, 0, 16, 5, 248, 68, 248, 66, 126, 37, 0,
...> 130, 70, 0, 0, 2>>]
...> |> ExWire.Packet.Capability.Par.SnapshotData.deserialize()
%ExWire.Packet.Capability.Par.SnapshotData{
chunk: %ExWire.Packet.Capability.Par.SnapshotData.StateChunk{
account_entries: [
{
<<1::256>>,
%ExWire.Packet.Capability.Par.SnapshotData.StateChunk.RichAccount{
nonce: 2,
balance: 3,
code_flag: :has_code,
code: <<5::256>>,
storage: [{<<1::256>>, <<2::256>>}]
}
}
]
},
hash: <<8, 203, 227, 135, 24, 92, 98, 193, 28, 230, 1, 177, 51, 95,
135, 13, 223, 76, 129, 212, 190, 45, 44, 204, 198, 38, 249,
186, 174, 18, 121, 52>>
}
"""
@impl true
def deserialize(rlp) do
[chunk_data] = rlp
hash = Keccak.kec(chunk_data)
{:ok, chunk_rlp_encoded} = :snappyer.decompress(chunk_data)
chunk_rlp = ExRLP.decode(chunk_rlp_encoded)
# Quick way to determine if chunk is a block chunk or state chunk is that
# state chunks start with a list element where block chunks do not.
chunk =
case chunk_rlp do
[] ->
nil
[el | _rest] when is_list(el) ->
StateChunk.deserialize(chunk_rlp)
_ ->
BlockChunk.deserialize(chunk_rlp)
end
%__MODULE__{chunk: chunk, hash: hash}
end
@doc """
Handles a SnapshotData message. We should send our manifest
to the peer. For now, we'll do nothing.
## Examples
iex> %ExWire.Packet.Capability.Par.SnapshotData{}
...> |> ExWire.Packet.Capability.Par.SnapshotData.handle()
:ok
"""
@impl true
def handle(_packet = %__MODULE__{}) do
:ok
end
end
|
apps/ex_wire/lib/ex_wire/packet/capability/par/snapshot_data.ex
| 0.862294
| 0.742515
|
snapshot_data.ex
|
starcoder
|
defmodule Bio.Interaction do
  @moduledoc """
  Resolves interactions between game entities (cells, pellets, viruses and
  cell containers).

  `make/2` and `try/2` produce a pair `[left_outcome, right_outcome]` where
  each outcome is the list of entities that remain on that side after the
  interaction.
  """
  alias Bio.{Cell, Pellet, Physic, Sequence, Virus}

  # No interaction
  defp possible?(%Pellet{}, %Pellet{}), do: false
  defp possible?(%Pellet{}, %Virus{}), do: false
  defp possible?(%Virus{}, %Pellet{}), do: false
  defp possible?(%Virus{}, %Virus{}), do: false

  # A container can interact when any of its cells can interact with the entity.
  defp possible?(%{cells: cells}, %{physic: %Physic{}} = entity),
    do: Enum.any?(cells, &possible?(&1, entity))

  defp possible?(%{physic: %Physic{}} = entity, %{cells: _} = container),
    do: possible?(container, entity)

  # Two containers can interact when any pair of their cells can.
  defp possible?(%{cells: left_cells}, %{cells: right_cells}) do
    Enum.any?(left_cells, fn left_cell -> Enum.any?(right_cells, &possible?(&1, left_cell)) end)
  end

  # Plain entities interact when their physics bodies collide.
  defp possible?(%{physic: %Physic{} = left_physic}, %{physic: %Physic{} = right_physic}),
    do: Physic.collide?(left_physic, right_physic)

  # Cell eats a pellet: the cell absorbs the pellet's mass; the pellet is gone.
  defp make(%Cell{} = cell, %Pellet{physic: %Physic{mass: pellet_mass}}),
    do: [[update_in(cell.physic.mass, &(&1 + pellet_mass))], []]

  # Symmetric case: delegate, then swap the outcome sides back.
  defp make(%Pellet{} = pellet, %Cell{} = cell), do: make(cell, pellet) |> Enum.reverse()

  # Cell vs virus: a heavier cell absorbs the virus, and is split (via
  # Cell.split/2) unless it is more than twice the virus mass. A lighter
  # cell leaves both unchanged.
  defp make(
         %Cell{physic: %Physic{mass: cell_mass}} = cell,
         %Virus{physic: %Physic{mass: virus_mass}} = virus
       ) do
    cond do
      cell_mass > virus_mass ->
        cell = update_in(cell.physic.mass, &(&1 + virus_mass))

        # Note: the comparison uses the cell's mass *before* absorbing the virus.
        if cell_mass > virus_mass * 2 do
          [[cell], []]
        else
          [Cell.split(cell, 5), []]
        end

      true ->
        [[cell], [virus]]
    end
  end

  defp make(%Virus{} = virus, %Cell{} = cell), do: make(cell, virus) |> Enum.reverse()

  # Cell vs cell: the heavier absorbs the lighter; equal masses coexist.
  defp make(
         %Cell{physic: %Physic{mass: left_mass}} = left_cell,
         %Cell{physic: %Physic{mass: right_mass}} = right_cell
       ) do
    cond do
      left_mass > right_mass -> [[update_in(left_cell.physic.mass, &(&1 + right_mass))], []]
      left_mass == right_mass -> [[left_cell], [right_cell]]
      left_mass < right_mass -> [[], [update_in(right_cell.physic.mass, &(&1 + left_mass))]]
    end
  end

  # Container vs plain entity: pick one random member cell that can interact,
  # resolve against it, and keep the container's other cells unchanged.
  defp make(%{cells: cells} = container, %{physic: %Physic{}} = entity) do
    random_cells = Enum.shuffle(cells)

    if index =
         random_cells
         |> Enum.find_index(&possible?(&1, entity)) do
      [left_outcome, right_outcome] =
        random_cells
        |> Enum.at(index)
        |> make(entity)

      [
        update_in(container.cells, fn _ -> left_outcome ++ List.delete_at(random_cells, index) end),
        right_outcome
      ]
    else
      [[container], [entity]]
    end
  end

  defp make(%{physic: %Physic{}} = entity, %{cells: _} = container),
    do: make(container, entity) |> Enum.reverse()

  # Container vs container: pair up cells across both sides (each index is
  # consumed at most once), resolve each pair, then rebuild both containers
  # from updated plus untouched cells. A side left with no cells is dropped.
  defp make(%{cells: left_cells} = left_container, %{cells: right_cells} = right_container) do
    left_random_cells = Enum.shuffle(left_cells)
    right_random_cells = Enum.shuffle(right_cells)
    left_indexes = 0..(length(left_random_cells) - 1)
    right_indexes = 0..(length(right_random_cells) - 1)

    # NOTE(review): pairs below are looked up in the *unshuffled* lists
    # (`left_cells`/`right_cells`) while the non-consumed remainder is taken
    # from the *shuffled* lists — looks like the same list was intended in
    # both places; confirm before changing.
    {{left_consumed_indexes, right_consumed_indexes},
     [left_updated_entities, right_updated_entities]} =
      left_indexes
      |> Sequence.combine(right_indexes)
      |> Enum.reduce({{[], []}, [[], []]}, fn [left_index, right_index],
                                              {{left_consumed_indexes, right_consumed_indexes} =
                                                 nested_consumed_indexes,
                                               [left_updated_entities, right_updated_entities] =
                                                 nested_updated_entities} ->
        case not Enum.member?(left_consumed_indexes, left_index) and
               not Enum.member?(right_consumed_indexes, right_index) do
          true ->
            case try(Enum.at(left_cells, left_index), Enum.at(right_cells, right_index)) do
              {:ok, [left_outcome, right_outcome]} ->
                {{[left_index | left_consumed_indexes], [right_index | right_consumed_indexes]},
                 [left_outcome ++ left_updated_entities, right_outcome ++ right_updated_entities]}

              {:error, _} ->
                {nested_consumed_indexes, nested_updated_entities}
            end

          false ->
            {nested_consumed_indexes, nested_updated_entities}
        end
      end)

    left_non_consumed_entities =
      left_indexes
      |> Sequence.difference(left_consumed_indexes)
      |> Enum.map(&Enum.at(left_random_cells, &1))

    right_non_consumed_entities =
      right_indexes
      |> Sequence.difference(right_consumed_indexes)
      |> Enum.map(&Enum.at(right_random_cells, &1))

    case {left_updated_entities ++ left_non_consumed_entities,
          right_updated_entities ++ right_non_consumed_entities} do
      {[], []} ->
        [[], []]

      {new_left_cells, []} ->
        [update_in(left_container.cells, fn _ -> new_left_cells end), []]

      {[], new_right_cells} ->
        [[], update_in(right_container.cells, fn _ -> new_right_cells end)]

      {new_left_cells, new_right_cells} ->
        [
          update_in(left_container.cells, fn _ -> new_left_cells end),
          update_in(right_container.cells, fn _ -> new_right_cells end)
        ]
    end
  end

  # No interaction
  defp make(%{physic: %Physic{}} = left_entity, %{physic: %Physic{}} = right_entity),
    do: [[left_entity], [right_entity]]

  @doc """
  Attempts an interaction between two entities.

  Returns `{:ok, [left_outcome, right_outcome]}` when `possible?/2` holds,
  otherwise `{:error, [[left_entity], [right_entity]]}` with both unchanged.
  """
  def try(left_entity, right_entity) do
    if possible?(left_entity, right_entity),
      do: {:ok, make(left_entity, right_entity)},
      else: {:error, [[left_entity], [right_entity]]}
  end

  @doc """
  Resolves pairwise interactions within a list of entities.

  Entities are shuffled, candidate index pairs come from
  `Sequence.combine/1`, and each index participates in at most one
  successful interaction; untouched entities are carried through.
  """
  def try([]), do: []

  def try(entities) when is_list(entities) do
    entities = Enum.shuffle(entities)
    indexes = 0..(length(entities) - 1)

    {consumed_indexes, updated_entities} =
      indexes
      |> Sequence.combine()
      |> Enum.reduce({[], []}, fn [left_index, right_index] = indexes,
                                  {consumed_indexes, updated_entities} ->
        # Both indexes of the pair must still be unconsumed (list difference
        # keeps exactly the pair only when neither index was used before).
        case length(indexes -- consumed_indexes) do
          2 ->
            case try(Enum.at(entities, left_index), Enum.at(entities, right_index)) do
              {:ok, outcomes} ->
                {indexes ++ consumed_indexes, List.flatten(outcomes, updated_entities)}

              {:error, _} ->
                {consumed_indexes, updated_entities}
            end

          _ ->
            {consumed_indexes, updated_entities}
        end
      end)

    non_consumed_entities =
      indexes
      |> Sequence.difference(consumed_indexes)
      |> Enum.map(&Enum.at(entities, &1))

    updated_entities ++ non_consumed_entities
  end
end
|
lib/bio/interaction.ex
| 0.581184
| 0.627709
|
interaction.ex
|
starcoder
|
defmodule YtPotion.Channel do
  import YtPotion.Base

  @moduledoc """
  Provides methods to interact with the YouTube Channels API
  """

  @doc """
  Returns the YouTube API response
  ## Examples
  ```elixir
  iex > YtPotion.Channel.list(%{id: "UC0PEAMcRK7Mnn2G1bCBXOWQ,UCyMafjsLsswLvsd4u_ilprQ", part: "statistics"})
  {:ok,
  %HTTPoison.Response{body: "{\n \"kind\": \"youtube#channelListResponse\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/KeGCuFV_wF8Z6XHAhbUu6whFc-0\\\"\",\n \"pageInfo\": {\n \"totalResults\": 2,\n \"resultsPerPage\": 2\n },\n \"items\": [\n {\n \"kind\": \"youtube#channel\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/tMqC9AOnNDFtmDACV9EQD1PdRgE\\\"\",\n \"id\": \"UCyMafjsLsswLvsd4u_ilprQ\",\n \"statistics\": {\n \"viewCount\": \"0\",\n \"commentCount\": \"0\",\n \"subscriberCount\": \"72019\",\n \"hiddenSubscriberCount\": false,\n \"videoCount\": \"0\"\n }\n },\n {\n \"kind\": \"youtube#channel\",\n \"etag\": \"\\\"gMxXHe-zinKdE9lTnzKu8vjcmDI/2_DtWcM4_xfXufIwmhglg-w9N98\\\"\",\n \"id\": \"UC0PEAMcRK7Mnn2G1bCBXOWQ\",\n \"statistics\": {\n \"viewCount\": \"21305557\",\n \"commentCount\": \"0\",\n \"subscriberCount\": \"82335\",\n \"hiddenSubscriberCount\": false,\n \"videoCount\": \"105\"\n }\n }\n ]\n}\n",
  headers: [{"Expires", "Wed, 01 Feb 2017 01:50:40 GMT"},
  {"Date", "Wed, 01 Feb 2017 01:50:40 GMT"},
  {"Cache-Control", "private, max-age=0, must-revalidate, no-transform"},
  {"ETag", "\"gMxXHe-zinKdE9lTnzKu8vjcmDI/KeGCuFV_wF8Z6XHAhbUu6whFc-0\""},
  {"Vary", "Origin"}, {"Vary", "X-Origin"},
  {"Content-Type", "application/json; charset=UTF-8"},
  {"X-Content-Type-Options", "nosniff"}, {"X-Frame-Options", "SAMEORIGIN"},
  {"X-XSS-Protection", "1; mode=block"}, {"Content-Length", "823"},
  {"Server", "GSE"}, {"Alt-Svc", "quic=\":443\"; ma=2592000; v=\"35,34\""}],
  status_code: 200}}
  ```
  """
  @spec list(map, map) :: map #HTTPoison.Response{}
  # NOTE(review): `yt_macro_call` is not a standard Elixir form — presumably a
  # macro imported from `YtPotion.Base` that defines `list/2` wrapping
  # `get_request/2`; confirm against YtPotion.Base before changing.
  yt_macro_call list(filter_part, options \\ %{}) do
    get_request("channels", Map.merge(filter_part, options))
  end
end
|
lib/yt_potion/yt_potion_channel.ex
| 0.799638
| 0.445469
|
yt_potion_channel.ex
|
starcoder
|
defmodule PigLatin do
  @doc """
  Given a `phrase`, translate it a word at a time to Pig Latin.

  Words beginning with consonants should have the consonant moved to the end of
  the word, followed by "ay".

  Words beginning with vowels (aeiou) should have "ay" added to the end of the
  word.

  Some groups of letters are treated like consonants, including "ch", "qu",
  "squ", "th", "thr", and "sch".

  Some groups are treated like vowels, including "yt" and "xr".
  """
  @spec translate(phrase :: String.t()) :: String.t()
  # An empty phrase translates to itself. Without this clause the y/x-prefix
  # check below crashed with FunctionClauseError on "" (its binary pattern
  # requires at least one character).
  def translate(""), do: ""

  def translate(phrase) do
    cond do
      multiple_words?(phrase) ->
        # Translate each word independently and re-join with single spaces.
        phrase
        |> String.split()
        |> Enum.map(&translate/1)
        |> Enum.join(" ")

      starts_with_y_or_x_then_consonant?(phrase) ->
        # Leading "y"/"x" followed by a consonant behaves like a vowel
        # (e.g. "yttria", "xray").
        phrase |> String.replace_suffix("", "ay")

      starts_with_consonant?(phrase) ->
        # Rotate the leading consonant cluster to the end, then append "ay".
        phrase
        |> String.codepoints()
        |> rotate_leading_consonants([])
        |> List.flatten()
        |> Enum.join()
        |> String.replace_suffix("", "ay")

      true ->
        # Vowel-initial word: just append "ay".
        phrase |> String.replace_suffix("", "ay")
    end
  end

  # True when the phrase contains more than one whitespace-separated word.
  defp multiple_words?(phrase), do: phrase |> String.split() |> Enum.count() > 1

  # True when the (sub)string starts with a non-vowel character.
  defp starts_with_consonant?(phrase), do: Regex.match?(~r/^[^aeiou]+/i, phrase)

  # "u" is carried along with a previously moved "q" (e.g. "queen" -> "eenquay").
  defp follows_q?(character, moved), do: character == "u" and Enum.member?(moved, "q")

  defp starts_with_y_or_x_then_consonant?(<<first_letter::binary-size(1), rest_word::binary>>) do
    Enum.member?(["x", "y"], first_letter) and starts_with_consonant?(rest_word)
  end

  # Fallback so strings shorter than one character cannot crash the check.
  defp starts_with_y_or_x_then_consonant?(_), do: false

  # Moves the leading consonant cluster behind the rest of the word.
  # `moved` accumulates consonants seen so far in reverse order; when the
  # first vowel is found the result is vowel ++ rest ++ moved-consonants
  # (a possibly nested list, flattened by the caller).
  defp rotate_leading_consonants([], moved), do: moved

  defp rotate_leading_consonants([character | rest], moved) do
    cond do
      follows_q?(character, moved) ->
        rotate_leading_consonants(rest, [character | moved])

      starts_with_consonant?(character) ->
        rotate_leading_consonants(rest, [character | moved])

      true ->
        reversed = Enum.reverse(moved)
        rotate_leading_consonants([], [character | [rest | reversed]])
    end
  end
end
|
elixir/pig-latin/lib/pig_latin.ex
| 0.697197
| 0.469581
|
pig_latin.ex
|
starcoder
|
defmodule RSA do
  @moduledoc """
  Toy ("textbook") RSA: key generation, encryption/decryption and
  signing/verification over plain integers.

  Not for production use: no padding scheme, tiny ~11-bit primes, and a
  linear search for the private exponent.
  """

  # 2^s — used as lower/upper bound for the random prime search.
  defp big_num(s) do
    # 1..s+1 has s+1 elements; the first seeds the accumulator (1), the
    # remaining s each double it, yielding 2^s.
    Enum.reduce(1..(s + 1), fn _, acc -> acc * 2 end)
  end

  # Square-and-multiply modular exponentiation: computes a^b mod c.
  defp big_pow_mod_res(_, b, _, ans) when b <= 0, do: ans

  defp big_pow_mod_res(a, b, c, ans) do
    ans = if rem(b, 2) == 1, do: rem(ans * a, c), else: ans
    # `div/2` keeps the exponent an integer. The previous `b / 2 |> trunc`
    # went through a float and silently corrupted exponents above ~2^53
    # (or raised ArithmeticError for very large ones).
    big_pow_mod_res(rem(a * a, c), div(b, 2), c, ans)
  end

  defp big_pow_mod(a, b, c) do
    big_pow_mod_res(rem(a, c), b, c, 1)
  end

  # Miller-Rabin witness loop: square x up to `rounds` times looking for n-1.
  defp mr_witness_loop(_x, _n, rounds) when rounds <= 0, do: :composite

  defp mr_witness_loop(x, n, rounds) do
    x = big_pow_mod(x, 2, n)
    if x == n - 1, do: :probable_prime, else: mr_witness_loop(x, n, rounds - 1)
  end

  # `k` independent Miller-Rabin rounds with random bases. The previous
  # implementation rebound `x` inside a `for` comprehension, so the repeated
  # squaring never actually happened (comprehension iterations do not carry
  # rebindings); this recursive version performs it correctly.
  defp miller_rabin(_n, 0, _r, _s), do: true

  defp miller_rabin(n, k, r, s) do
    a = Enum.random(2..(n - 1))
    x = big_pow_mod(a, s, n)

    cond do
      x == 1 or x == n - 1 -> miller_rabin(n, k - 1, r, s)
      mr_witness_loop(x, n, r - 1) == :probable_prime -> miller_rabin(n, k - 1, r, s)
      true -> false
    end
  end

  # Factors n as 2^r * s with s odd; returns {r, s}.
  defp max_cardinal(r, n) when rem(n, 2) != 0, do: {r, n}

  defp max_cardinal(r, n) do
    max_cardinal(r + 1, div(n, 2))
  end

  # Guard against n < 2 (max_cardinal would loop forever on n - 1 = 0).
  defp is_prime?(n) when n < 2, do: false
  defp is_prime?(n) when n in [2, 3], do: true
  defp is_prime?(n) when rem(n, 2) == 0, do: false

  defp is_prime?(n) do
    {r, s} = max_cardinal(0, n - 1)
    miller_rabin(n, 100, r, s)
  end

  # Random integer in [2^10, 2^11). `:crypto.rand_uniform/2` was removed in
  # OTP 23; draw two strong random bytes instead. The range size is a power
  # of two, so `rem/2` stays uniform.
  defp prng() do
    lo = big_num(10)
    hi = big_num(11)
    <<r::unsigned-integer-size(16)>> = :crypto.strong_rand_bytes(2)
    lo + rem(r, hi - lo)
  end

  # Rejection-sample until a probable prime is found.
  defp prng_prime() do
    n = prng()
    if is_prime?(n) == true, do: n, else: prng_prime()
  end

  defp gcd(a, b) when b == 0, do: a

  defp gcd(a, b) do
    gcd(b, rem(a, b))
  end

  # Integer division keeps this exact for big integers; the previous
  # `a * b / gcd(a, b) |> trunc` lost precision through a float.
  defp lcm(a, b) do
    div(a * b, gcd(a, b))
  end

  @doc "Smallest e in 2..l coprime to l (the public exponent)."
  def rng_e(l) do
    Enum.find 2..l, &(gcd(&1, l) == 1)
  end

  @doc "Smallest d in 2..l with e * d ≡ 1 (mod l) (the private exponent)."
  def rng_d(e, l) do
    Enum.find 2..l, &rem(e * &1, l) == 1
  end

  @doc """
  Generates a key pair `{{e, n}, {d, n}}` (public, private) from two random
  probable primes.
  """
  def generate() do
    q = prng_prime()
    p = prng_prime()
    n = p * q
    l = lcm(q - 1, p - 1)
    e = rng_e(l)
    d = rng_d(e, l)
    {{e, n}, {d, n}}
  end

  @doc "Encrypts integer `plaintext` with public key `{e, n}`."
  def encrypt(plaintext, {e, n}) do
    big_pow_mod(plaintext, e, n)
  end

  @doc "Decrypts integer `ciphertext` with private key `{d, n}`."
  def decrypt(ciphertext, {d, n}) do
    big_pow_mod(ciphertext, d, n)
  end

  @doc "Signs integer `plaintext` with private key `{d, n}`."
  def sign(plaintext, {d, n}) do
    big_pow_mod(plaintext, d, n)
  end

  @doc "Verifies that `signature` under public key `{e, n}` matches `plaintext`."
  def verify(plaintext, signature, {e, n}) do
    big_pow_mod(signature, e, n) == plaintext
  end
end
|
lib/rsa.ex
| 0.548553
| 0.512205
|
rsa.ex
|
starcoder
|
defmodule PhoenixETag do
  @moduledoc """
  Conditional request (ETag & modified-since) support for Phoenix.
  ## Usage
  The library provides a replacement function for `Phoenix.Controller.render/1-4`
  called `PhoenixETag.render_if_stale/1-4` accepting exactly the same arguments.
  When called the function expects the view to implement an additional callback:
  `stale_checks/2` similar to `render/2` that is responsible for returning the
  etag value and/or last modified value for the current resource.
  Additional helper `PhoenixETag.schema_etag/1` is provided for generating etag
  values of a single or multiple schema structs.
      # controller
      def show(conn, %{"id" => id}) do
        data = MyApp.load_data(id)
        PhoenixETag.render_if_stale(conn, :show, data: data)
      end
      # view
      def stale_checks("show." <> _format, %{data: data}) do
        [etag: PhoenixETag.schema_etag(data),
         last_modified: PhoenixETag.schema_last_modified(data)]
      end
  Both the `etag` and `last_modified` values are optional. The first one will add
  an `etag` header to the response and perform a stale check against the
  `if-none-match` header. The second one will add a `last-modified` header to the
  response and perform a stale check against the `if-modified-since` header.
  If the headers indicate cache is fresh a 304 Not Modified response is triggered,
  and rendering of the response is aborted. If headers indicate cache is stale,
  render proceeds as normal, except the extra headers are added to the response.
  """

  @type schema :: %{__struct__: atom, id: term, updated_at: Calendar.date_time}
  @type etag :: String.t

  @doc """
  Utility function for generating etag values from schemas.
  This function assumes the schema has `id` field of any type and
  `updated_at` field of either the `:utc_datetime` or `:naive_datetime` type.
  A weak ETag in the RFC 7232 `W/"<value>"` form is always produced.
  Returns `nil` for `nil` or an empty list.
  """
  @spec schema_etag(nil | schema | [schema]) :: etag | nil
  def schema_etag(nil), do: nil
  def schema_etag([]), do: nil

  def schema_etag(schema_or_schemas) do
    list = Enum.map(List.wrap(schema_or_schemas), fn schema ->
      [schema.__struct__, schema.id, NaiveDateTime.to_erl(schema.updated_at)]
    end)

    binary = :erlang.term_to_binary(list)
    # RFC 7232 requires the opaque value to be quoted: W/"<hex>". The previous
    # `"W/ " <> hex` form (stray space, no quotes) was not a valid ETag and
    # never matched what conforming clients echo back in `if-none-match`.
    ~s(W/") <> Base.encode16(:crypto.hash(:md5, binary), case: :lower) <> ~s(")
  end

  @doc """
  Utility function for obtaining the last modified value from schemas.
  This function expects the schema to define a `updated_at` field of either
  `:utc_datetime` or `:naive_datetime` type.
  Returns `nil` for `nil` or an empty list.
  """
  @spec schema_last_modified(nil | schema | [schema]) :: Calendar.date_time | nil
  def schema_last_modified(nil), do: nil
  def schema_last_modified([]), do: nil

  def schema_last_modified(schema_or_schemas) do
    schema_or_schemas
    |> List.wrap
    |> Enum.map(&(&1.updated_at))
    |> Enum.max_by(&NaiveDateTime.to_erl/1)
  end

  @doc """
  Renders the given template or the default template
  specified by the current action with the given assigns.
  See `render_if_stale/3` for more information.
  """
  @spec render_if_stale(Plug.Conn.t, Keyword.t | map | binary | atom) :: Plug.Conn.t
  def render_if_stale(conn, template_or_assigns \\ [])

  def render_if_stale(conn, template) when is_binary(template) or is_atom(template) do
    render_if_stale(conn, template, %{})
  end

  def render_if_stale(conn, assigns) when is_map(assigns) or is_list(assigns) do
    action = Phoenix.Controller.action_name(conn)
    render_if_stale(conn, action, assigns)
  end

  @doc """
  Renders the given `template` and `assigns` based on the `conn` information.
  Considers data freshness based on the `if-modified-since` and `if-none-match`
  headers and avoids rendering if a `304 Not Modified` response is possible.
  Expects the view module to implement an additional callback: `stale_checks/2`.
  The function is called with the template name and assigns - exactly like
  `render/2`. The callback is expected to return a keyword list with two
  possible keys:
  * `:etag` - the entity tag for the current resource. Can be generated using
    the `schema_etag/1` utility function.
  * `:last_modified` - the last modified value for the current resource. Has to
    be either a `NaiveDateTime` or a `DateTime`. The value can be obtained with
    the `schema_last_modified/1` utility function.
  See `Phoenix.Controller.render/3` for more information.
  """
  @spec render_if_stale(Plug.Conn.t, binary | atom, Keyword.t | map) :: Plug.Conn.t
  def render_if_stale(conn, template, assigns)
      when is_atom(template) and (is_list(assigns) or is_map(assigns)) do
    format =
      Phoenix.Controller.get_format(conn) ||
        raise "cannot render template #{inspect template} because conn.params[\"_format\"] is not set. " <>
              "Please set `plug :accepts, ~w(html json ...)` in your pipeline."

    template = template_name(template, format)
    do_render_if_stale(conn, template, assigns)
  end

  def render_if_stale(conn, template, assigns)
      when is_binary(template) and (is_list(assigns) or is_map(assigns)) do
    case Path.extname(template) do
      "." <> _format ->
        # We need to do this check before trying to ask for stale checks,
        # otherwise we'll hit FunctionClauseError in view instead of this one
        do_render_if_stale(conn, template, assigns)

      "" ->
        raise "cannot render template #{inspect template} without format. Use an atom if the " <>
              "template format is meant to be set dynamically based on the request format"
    end
  end

  def render_if_stale(conn, view, template)
      when is_atom(view) and (is_binary(template) or is_atom(template)) do
    render_if_stale(conn, view, template, %{})
  end

  @doc """
  A shortcut that renders the given template in the given view.
  Equivalent to:
      conn
      |> put_view(view)
      |> render_if_stale(template, assigns)
  """
  @spec render_if_stale(Plug.Conn.t, atom, atom | binary, Keyword.t | map) :: Plug.Conn.t
  def render_if_stale(conn, view, template, assigns)
      when is_atom(view) and (is_binary(template) or is_atom(template)) do
    conn
    |> Phoenix.Controller.put_view(view)
    |> render_if_stale(template, assigns)
  end

  defp do_render_if_stale(conn, template, assigns) do
    view = Phoenix.Controller.view_module(conn) ||
      raise "a view module was not specified, set one with put_view/2"

    conn
    |> prepare_assigns(assigns)
    |> if_stale(view, template, &Phoenix.Controller.render(&1, template, &2))
  end

  defp template_name(name, format) when is_atom(name),
    do: Atom.to_string(name) <> "." <> format
  defp template_name(name, _format) when is_binary(name),
    do: name

  defp prepare_assigns(conn, assigns) do
    update_in conn.assigns, &Enum.into(assigns, &1)
  end

  # Adds validator headers, then either renders (stale) or sends 304 (fresh).
  defp if_stale(conn, view, template, fun) do
    checks = view.stale_checks(template, conn.assigns)
    etag = checks[:etag]
    modified = checks[:last_modified]

    conn =
      conn
      |> put_etag(etag)
      |> put_last_modified(modified)

    if stale?(conn, etag, modified) do
      fun.(conn, Map.take(conn.assigns, [:layout]))
    else
      Plug.Conn.send_resp(conn, 304, "")
    end
  end

  defp put_etag(conn, nil),
    do: conn
  defp put_etag(conn, etag),
    do: Plug.Conn.put_resp_header(conn, "etag", etag)

  defp put_last_modified(conn, nil),
    do: conn
  defp put_last_modified(conn, modified) do
    Plug.Conn.put_resp_header(conn, "last-modified", format_date(modified))
  end

  # Only GET/HEAD requests carrying a conditional header can be "fresh".
  defp stale?(conn, etag, modified) do
    modified_since = List.first Plug.Conn.get_req_header(conn, "if-modified-since")
    none_match = List.first Plug.Conn.get_req_header(conn, "if-none-match")

    if get_or_head?(conn) and (modified_since || none_match) do
      modified_since?(modified_since, modified) or none_match?(none_match, etag)
    else
      true
    end
  end

  defp get_or_head?(%{method: method}), do: method in ["GET", "HEAD"]

  # Stale when the resource changed after the client's cached timestamp.
  defp modified_since?(header, last_modified) do
    if header && last_modified do
      modified_since = parse_date(header)
      last_modified = to_unix(last_modified)
      last_modified > modified_since
    else
      false
    end
  end

  # Stale when the client's etags match neither the current etag nor "*".
  defp none_match?(none_match, etag) do
    if none_match && etag do
      none_match = Plug.Conn.Utils.list(none_match)
      not(etag in none_match) and not("*" in none_match)
    else
      false
    end
  end

  defp to_unix(%DateTime{} = dt), do: DateTime.to_unix(dt)
  defp to_unix(naive), do: to_unix(DateTime.from_naive!(naive, "Etc/UTC"))

  defp format_date(datetime) do
    datetime
    |> NaiveDateTime.to_erl
    |> :phoenix_etag_date.rfc1123
  end

  defp parse_date(string) do
    string
    |> :phoenix_etag_date.parse_date
    |> NaiveDateTime.from_erl!
    |> DateTime.from_naive!("Etc/UTC")
    |> DateTime.to_unix
  end
end
|
lib/phoenix_etag.ex
| 0.862207
| 0.636692
|
phoenix_etag.ex
|
starcoder
|
defmodule Broker.Supervisor.Collector do
  @moduledoc """
  This the Collector supervisor which supervise the whole
  collector engine per topic.
  """
  use Supervisor
  alias Broker.Collector

  @spec start_link(list) :: tuple
  def start_link(args) do
    Supervisor.start_link(__MODULE__, args, name: args[:name])
  end

  @spec init(list) :: tuple
  def init(args) do
    children = get_children_by_topic(args[:topic], args)
    Supervisor.init(children, strategy: :one_for_all)
  end

  # Both topics build the exact same pipeline shape; only the feeder and the
  # per-partition collector modules differ. The former copy-pasted bodies
  # are collapsed into one parameterized builder.
  defp get_children_by_topic(:tx_trytes, args) do
    build_children(args, Collector.TxFeeder, Collector.TxCollector, Collector.TxBundleCollector)
  end

  defp get_children_by_topic(:sn_trytes, args) do
    build_children(args, Collector.SnFeeder, Collector.SnCollector, Collector.SnBundleCollector)
  end

  # Builds the child spec list, preserving the original start order:
  # feeders, distributor, transaction validators, collectors,
  # bundle collectors, bundle validators.
  defp build_children(args, feeder_mod, collector_mod, bundle_collector_mod) do
    # One feeder child per configured node.
    nodes = args[:nodes]
    # Drop [nodes, name] as they are no longer needed downstream.
    new_args = Keyword.drop(args, [:nodes, :name])

    feeder_children =
      for {{host, port}, num} <- Enum.with_index(nodes) do
        {feeder_mod, new_args ++ [host: host, port: port, num: num]}
      end

    # Partition counts are converted to inclusive 0-based upper bounds.
    transaction_partitions = args[:transaction_partitions] - 1
    bundle_partitions = args[:bundle_partitions] - 1

    distributor_children = [
      {Collector.Distributor, new_args}
    ]

    transaction_validator_children =
      for num <- 0..transaction_partitions do
        {Collector.TransactionValidator, [num: num] ++ new_args}
      end

    collector_children =
      for num <- 0..transaction_partitions do
        {collector_mod, [num: num] ++ new_args}
      end

    bundle_collector_children =
      for num <- 0..bundle_partitions do
        {bundle_collector_mod, [num: num] ++ new_args}
      end

    bundle_validator_children =
      for num <- 0..bundle_partitions do
        {Collector.BundleValidator, [num: num] ++ new_args}
      end

    feeder_children ++
      distributor_children ++
      transaction_validator_children ++
      collector_children ++
      bundle_collector_children ++
      bundle_validator_children
  end
end
|
apps/broker/lib/broker/supervisor/collector.ex
| 0.564219
| 0.444806
|
collector.ex
|
starcoder
|
defmodule CPF do
  @moduledoc """
  CPF mobulde provides functions to verify if a CPF is valid.
  """

  @typedoc """
  A custom CPF type that can be a number or string
  """
  @type t :: integer | String.t()

  @doc """
  Returns `true` the given `cpf` is valid, otherwise `false`.
  ## Examples
      iex> CPF.valid?(563_606_676_73)
      true
      iex> CPF.valid?(563_606_676_72)
      false
      iex> CPF.valid?("563.606.676-73")
      true
      iex> CPF.valid?("563/60.6-676/73")
      false
      iex> CPF.valid?("563.606.676-72")
      false
      iex> CPF.valid?("56360667673")
      true
      iex> CPF.valid?("56360667672")
      false
  """
  @spec valid?(t) :: boolean
  def valid?(cpf) when is_integer(cpf) do
    digits = Integer.digits(cpf)
    missing = 11 - length(digits)

    cond do
      # More than 11 digits can never be a CPF.
      missing < 0 -> false
      # All-equal digits (e.g. 111.111.111-11) are rejected outright.
      match?([_], Enum.uniq(digits)) -> false
      true -> digits |> pad_left(missing) |> verifier_digits_match?()
    end
  end

  def valid?(
        <<first::bytes-size(3)>> <>
          "." <>
          <<second::bytes-size(3)>> <>
          "." <>
          <<third::bytes-size(3)>> <>
          "-" <>
          <<check::bytes-size(2)>>
      ) do
    # Canonical "ddd.ddd.ddd-dd" format: strip punctuation and revalidate.
    valid?(first <> second <> third <> check)
  end

  def valid?(cpf) when is_binary(cpf) do
    # Any other string must parse entirely as an integer.
    case Integer.parse(cpf) do
      {number, ""} -> valid?(number)
      _other -> false
    end
  end

  def valid?(_other), do: false

  # Prepends zeroes so the digit list always has 11 entries.
  defp pad_left(digits, 0), do: digits
  defp pad_left(digits, count), do: pad_left([0 | digits], count - 1)

  # Recomputes both verifier digits from the first nine digits and compares
  # them with positions 9 and 10 of the given number.
  defp verifier_digits_match?(digits) do
    base = Enum.take(digits, 9)
    v1 = verifier(weighted_sum(base, 10))
    # The second checksum reuses the first nine digits with shifted weights,
    # plus the first verifier digit weighted by 2.
    v2 = verifier(weighted_sum(base, 11) + 2 * v1)
    Enum.drop(digits, 9) == [v1, v2]
  end

  # Sum of digit * (start - index) over the leading digits.
  defp weighted_sum(digits, start) do
    digits
    |> Enum.with_index()
    |> Enum.reduce(0, fn {digit, index}, total -> total + (start - index) * digit end)
  end

  # A remainder of 0 or 1 maps to verifier digit 0; otherwise 11 - remainder.
  defp verifier(sum) do
    case rem(sum, 11) do
      r when r in [0, 1] -> 0
      r -> 11 - r
    end
  end
end
|
lib/cpf.ex
| 0.861931
| 0.475605
|
cpf.ex
|
starcoder
|
defmodule Day13 do
  @moduledoc """
  Plays an Intcode-driven breakout-style arcade game (day 13 puzzle).

  The board is a map of `{x, y} => tile_id` with tile ids 0 empty, 1 wall,
  2 block, 3 paddle, 4 ball (see `print/1`); the score is stored at the
  special position `{-1, 0}`.
  """

  # Reads a comma-separated Intcode program from `path` into a list of ints.
  def from_file(path) do
    File.read!(path)
    |> String.split(",")
    |> Enum.map(&Integer.parse/1)
    |> Enum.map(&(elem(&1, 0)))
  end

  # Runs the program to completion and returns the final board.
  def game(program) do
    game_execute(Intcode.execute(Intcode.initial_state(program, [])))
  end

  # Same as game/1 but with memory address 0 set to 2 ("free play" mode).
  def game_with_freeplay(program) do
    game_execute(Intcode.execute(Intcode.initial_state(program, []) |> set_freeplay))
  end

  # Overwrites memory address 0 with 2 to enable free play.
  def set_freeplay(state), do: %{state | :memory => Map.put(state.memory, 0, 2)}

  # Main loop: apply the machine's buffered output (triples of x, y, tile_id;
  # output is accumulated in reverse) to the board, pick the next joystick
  # input, and resume the machine until it reports done.
  def game_execute(state, board \\ %{}) do
    output = state.output
    |> Enum.reverse
    |> Enum.chunk_every(3)
    new_board = update(board, output)
    input = determine_input(new_board)
    if Map.get(state, :done, false) do
      new_board
    else
      game_execute(Intcode.execute(%{state | :inputs => input, :output => []}), new_board)
    end
  end

  # Joystick strategy: move the paddle (tile 3) toward the ball (tile 4):
  # -1 left, 1 right, 0 stay.
  # NOTE(review): crashes if the board does not yet contain both a paddle
  # and a ball — relies on the program drawing them before the first input.
  def determine_input(board) do
    {px, _} = board |> find(3)
    {bx, _} = board |> find(4)
    cond do
      px == bx -> [0]
      px > bx -> [-1]
      px < bx -> [1]
    end
  end

  # Returns the {x, y} position of the first tile of the given type,
  # ignoring the score cell (x == -1).
  def find(board, tile) do
    board
    |> Map.to_list
    |> Enum.find(fn {{x, _}, type} -> type == tile && x >= 0 end)
    |> elem(0)
  end

  # Merges draw instructions into the board: either a list of [x, y, tile_id]
  # triples or a single triple.
  def update(%{} = board, []), do: board
  def update(%{} = board, [head | _] = updates) when is_list(head) do
    updates
    |> Enum.reduce(board, fn tile, board ->
      update(board, tile)
    end)
  end
  def update(%{} = board, [x, y, tile_id]) do
    board |> Map.put({x, y}, tile_id)
  end

  # Counts remaining block tiles (tile_id == 2).
  def blocks(board) do
    board |> Map.values |> Enum.reduce(0, fn tile_id, blocks -> if tile_id == 2, do: blocks + 1, else: blocks end)
  end

  # The current score, drawn by the program at {-1, 0}; 0 when absent.
  def score(board), do: board |> Map.get({-1, 0}, 0)

  # Renders the board to stdout; the score cell prints as its numeric value.
  def print(board) when map_size(board) == 0, do: nil
  def print(board) do
    y_boundary = board |> Map.keys |> Enum.group_by(fn {_, y} -> y end) |> Map.keys
    y_range = (y_boundary |> Enum.min) .. (y_boundary |> Enum.max)
    x_boundary = board |> Map.keys |> Enum.group_by(fn {x, _} -> x end) |> Map.keys
    x_range = (x_boundary |> Enum.min) .. (x_boundary |> Enum.max)
    Enum.each(y_range, fn y ->
      Enum.each(x_range, fn x ->
        type = Map.get(board, {x, y}, 0)
        cond do
          {x, y} == {-1, 0} -> IO.write(type)
          type == 0 -> IO.write(" ")
          type == 1 -> IO.write("#")
          type == 2 -> IO.write("*")
          type == 3 -> IO.write("_")
          type == 4 -> IO.write(".")
        end
      end)
      IO.puts("")
    end)
  end

  # Prints both puzzle answers: remaining blocks, then the free-play score.
  def solution do
    IO.puts("#{inspect from_file("day13_input.txt") |> game |> blocks}")
    IO.puts("#{inspect from_file("day13_input.txt") |> game_with_freeplay |> score}")
  end
end
starcoder
|
defmodule OneDialer do
  @moduledoc """
  Click to call implementation for OneChat.
  An interface for external PBX or calling service for handling calling
  phone numbers using a third party phone calling service like a PBX or
  web service.
  This module requires an adapter implementation which is configured with
      config :infinity_one, :dialer_adapter, SomeModule
  The adapter must implement the `dial/4` function.
  The interface also supports digit translation, which is gone here, before
  the call to the adapter. The translator uses uses a pattern matching approach
  taken from the asterisk open source PBX, with the following definition.
  `N` - matches digits 2-9
  `Z` - matches digits 1-9
  `X` - matches digits 0-9
  For example, to match a 10 digit number and insert a leading 1, use the
  following `"1, NXXNXXXXXX"`, where the initial `1` is the inserted digit and
  the trailing pattern it the matching specification.
      iex> OneDialer.translate_digits("5555555555", "1, NXXNXXXXXX")
      "15555555555"
      iex> OneDialer.translate_digits("15555555555", "1, NXXNXXXXXX")
      "15555555555"
      iex> OneDialer.translate_digits("1234", "1, NXXNXXXXXX")
      "1234"
  Multiple translations can be defined by using a `,` to separate each. For
  example, to add a second rule to add a 613 prefix to 7 digit numbers, use the
  following `"1, NXXNXXXXXX, 613, NXXXXXX"`
      iex> OneDialer.translate_digits("2234567", "1, NXXNXXXXXX, 613, NXXXXXX")
      "6132234567"
      iex> OneDialer.translate_digits("7322608", "1613, 73XXXXX")
      "16137322608"
  """
  require Logger

  # NOTE: read at compile time — changing the config requires a recompile.
  # Kept for backward compatibility; `opts[:adapter]` allows a runtime override.
  @adapter Application.get_env(:infinity_one, :dialer_adapter, nil)

  @doc """
  Call the dial function on the configured adapter.
  Calls a number by running the `dial/4` function on the configured adapter.
  """
  def dial(user, caller, number, opts), do: dial({user, caller}, number, opts)

  # No caller id available: nothing to dial.
  def dial({_user, nil}, _number, _opts), do: nil

  def dial({user, caller}, number, opts) do
    # Logger.warn "dial number: #{inspect number}"
    adapter = opts[:adapter] || @adapter

    if adapter do
      adapter.dial(user, caller, translate_digits(number), opts)
    else
      # Fixed: the message previously pointed at `config :one_dialer, ...`,
      # but the adapter is read from the `:infinity_one` application env
      # (see @adapter above and the moduledoc).
      Logger.error """
      OneDialer attempt to dial number #{number} without a configured adapter.
      Please configure an adapter with:
      config :infinity_one, :dialer_adapter, DialerModule
      """
    end
  end

  @doc """
  Apply configured digit translation rules to the called number.
  When `translation` is nil, the rules are read from the
  `:one_dialer, :dial_translation` application env (defaults to "").
  """
  def translate_digits(digits, translation \\ nil) do
    translation = translation || Application.get_env(:one_dialer, :dial_translation, "")

    translation
    |> String.replace(" ", "")
    |> String.split(",")
    # `Enum.chunk/2` is deprecated; chunk_every/4 with :discard preserves its
    # behavior of dropping a trailing unpaired element (malformed config).
    |> Enum.chunk_every(2, 2, :discard)
    |> process_patterns(digits)
  end

  # Returns the first rewritten number, or `digits` unchanged when no
  # pattern matches.
  defp process_patterns(list, digits) do
    Enum.find_value(list, digits, fn([insert_digits, pattern]) ->
      ("^" <> pattern)
      |> String.replace("N", "[2-9]")
      |> String.replace("Z", "[1-9]")
      |> String.replace("X", "[0-9]")
      |> Regex.compile!
      |> find_and_replace(digits, "#{insert_digits}\\0")
    end)
  end

  # Prefixes the matched number; nil (no match) lets find_value try the
  # next rule.
  defp find_and_replace(regex, digits, replace_str) do
    if Regex.run(regex, digits),
      do: Regex.replace(regex, digits, replace_str)
  end
end
|
plugins/one_dialer/lib/one_dialer.ex
| 0.864425
| 0.623004
|
one_dialer.ex
|
starcoder
|
defmodule Bf do
  @moduledoc """
  Interprets brainfuck programs.

  ## Examples

      Bf.Parser.parse("--[>--->->->++>-<<<<<-------]>--.>---------.>--..+++.")
      |> Bf.run()
      Hello
  """

  # `use Bitwise` is deprecated; importing brings in &&& etc. directly.
  import Bitwise

  @typedoc """
  The state returned by the interpreter.

  It is the current cell index and the memory.
  """
  @type state :: {integer, list(integer)}

  # Size of the wrapping memory tape.
  @mem_size 30_000

  @doc """
  Parses and executes a brainfuck program. Returns the machine's state.

  ## Examples

      Bf.Parser.parse("++++++++++[->++++++++++<]>++.+++++++++.." <>
                      "<+++++++++[->---------<]>-----------------.---.<")
      |> Bf.run
      foo
  """
  @spec run({:ok, Bf.Parser.program()}) :: state
  def run({:ok, program}) do
    mem = :array.new(@mem_size, default: 0)
    {ptr, mem} = run(program, 0, mem)
    {ptr, :array.to_list(mem)}
  end

  # Each clause below executes one instruction, threading {ptr, mem}.

  # Add to the current cell; cells are bytes so keep only the low 8 bits.
  defp run([{:add, x} | rest], ptr, mem) do
    cell = :array.get(ptr, mem)
    new_mem = :array.set(ptr, cell + x &&& 0xFF, mem)
    run(rest, ptr, new_mem)
  end

  # Move the pointer, wrapping around the tape in both directions.
  defp run([{:move, x} | rest], ptr, mem) do
    run(rest, wrap(ptr + x, @mem_size), mem)
  end

  # Set the current cell to a literal byte value.
  defp run([{:set, x} | rest], ptr, mem) do
    new_mem = :array.set(ptr, x &&& 0xFF, mem)
    run(rest, ptr, new_mem)
  end

  # Move the pointer by `step` until a zero cell is found.
  defp run([{:scan, step} | rest], ptr, mem) do
    run(rest, scan(ptr, mem, step), mem)
  end

  # Write the current cell to stdout as a byte.
  defp run([{:write} | rest], ptr, mem) do
    putc(ptr, mem)
    run(rest, ptr, mem)
  end

  # Read one byte from stdin into the current cell; on EOF the cell is
  # left unchanged.
  defp run([{:read} | rest], ptr, mem) do
    case readc() do
      :eof ->
        run(rest, ptr, mem)

      char ->
        new_mem = :array.set(ptr, char &&& 0xFF, mem)
        run(rest, ptr, new_mem)
    end
  end

  # Run the loop body until the current cell is zero; the program list is
  # kept whole so the loop re-enters itself.
  defp run(program = [{:loop, body} | rest], ptr, mem) do
    cell = :array.get(ptr, mem)

    case cell do
      0 ->
        run(rest, ptr, mem)

      _ ->
        {p, m} = run(body, ptr, mem)
        run(program, p, m)
    end
  end

  defp run([], ptr, mem), do: {ptr, mem}

  defp putc(ptr, mem) do
    cell = :array.get(ptr, mem)
    IO.binwrite(<<cell>>)
  end

  defp readc do
    case IO.getn("", 1) do
      {:error, _reason} -> :eof
      :eof -> :eof
      <<char>> -> char
    end
  end

  defp scan(ptr, mem, step) do
    cell = :array.get(ptr, mem)

    case cell do
      0 -> ptr
      _ -> scan(wrap(ptr + step, @mem_size), mem, step)
    end
  end

  # Euclidean modulo: rem/2 can be negative, so shift back into [0, b).
  defp wrap(a, b) do
    case rem(a, b) do
      value when value < 0 -> value + b
      value -> value
    end
  end
end
|
lib/bf.ex
| 0.569972
| 0.456228
|
bf.ex
|
starcoder
|
defmodule Brando.Tag do
  @moduledoc """
  Helpers for tagging schema data.

  Adds a `tags` field to your schema.

  ## Example/Usage

  Controller:

      use Brando.Tag,
        [:controller, [schema: Brando.Post]]

  View:

      use Brando.Tag, :view

  Schema:

      use Brando.Tag, :schema

      schema "my_schema" do
        # ...
        tags
      end

  You will find a function in your schema called `by_tag/1` which returns
  an Ecto Queryable of all records in your schema matching `tag`.

  Migration:

      use Brando.Tag, :migration

      def up do
        create table(:schema) do
          # ...
          tags
        end
      end

  Vue frontend:

    * Add a `KInputTags` component to your view
    * Add `tags` to your gql schema as `:json`
    * Add `tags` to your gql input object as `list_of(:string)`
  """

  defmodule Schema do
    @moduledoc false
    @doc false
    defmacro tags do
      quote do
        Ecto.Schema.field(:tags, {:array, :string})
      end
    end
  end

  defmodule Migration do
    @moduledoc false
    @doc false
    defmacro tags do
      quote do
        Ecto.Migration.add(:tags, {:array, :varchar})
      end
    end
  end

  @doc false
  def controller(_schema, _filter \\ nil) do
    quote do
    end
  end

  @doc false
  def schema do
    quote do
      import Brando.Tag.Schema, only: [tags: 0]

      @doc """
      Search `schema`'s tags field for `tags`
      """
      def by_tag(tag) do
        from m in __MODULE__,
          where: ^tag in m.tags
      end
    end
  end

  @doc false
  def migration do
    quote do
      import Brando.Tag.Migration, only: [tags: 0]
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end

  defmacro __using__([:controller, ctrl_opts] = opts) when is_list(opts) do
    apply(__MODULE__, :controller, ctrl_opts)
  end

  @doc """
  Splits the "tags" field in `params` to an array and returns `params`
  """
  def split_tags(%{"tags" => nil} = params), do: params

  def split_tags(%{"tags" => tags} = params),
    do: Map.put(params, "tags", do_split(tags))

  def split_tags(%{tags: nil} = params), do: params

  def split_tags(%{tags: tags} = params),
    do: Map.put(params, :tags, do_split(tags))

  # Params without a tags key pass through untouched.
  def split_tags(params), do: params

  # Splits a comma-separated tag string and trims whitespace around each tag.
  defp do_split(tags) do
    tags
    |> String.split(",")
    |> Enum.map(&String.trim/1)
  end
end
|
lib/brando/tag.ex
| 0.87401
| 0.510435
|
tag.ex
|
starcoder
|
defmodule Guardian.Plug.EnsurePermissions do
  @moduledoc """
  Use this plug to ensure that there are the
  correct permissions set in the claims found on the connection.

  ### Example

      alias Guardian.Plug.EnsurePermissions

      # read and write permissions for the admin set
      plug EnsurePermissions, admin: [:read, :write], handler: SomeMod,

      # read AND write permissions for the admin set
      # AND :profile for the default set
      plug EnsurePermissions, admin: [:read, :write],
                              default: [:profile],
                              handler: SomeMod

      # read AND write permissions for the admin set
      # OR :profile for the default set
      plug EnsurePermissions, one_of: [%{admin: [:read, :write]},
                                       %{default: [:profile]}],
                              handler: SomeMod

      # admin :read AND :write for the claims located in the :secret location
      plug EnsurePermissions, key: :secret,
                              admin: [:read, :write],
                              handler: SomeMod

  On failure the handler is handed the halted connection and the request
  params merged with `reason: :forbidden`.

  The handler will be called on failure.
  The `:unauthorized` function will be called when a failure is detected.
  """
  require Logger
  import Plug.Conn

  # Normalizes plug options at compile time into a map with:
  #   :key       - where to find the claims (defaults to :default)
  #   :handler   - {module, :unauthorized} invoked on failure
  #   :perm_sets - list of permission sets; matching ANY one set allows
  def init(opts) do
    opts = Enum.into(opts, %{})
    on_failure = Map.get(opts, :on_failure)
    key = Map.get(opts, :key, :default)
    handler = Map.get(opts, :handler)

    perm_sets =
      case Map.get(opts, :one_of) do
        nil ->
          # Everything that is not a reserved option is one permission set.
          single_set = Map.drop(opts, [:handler, :on_failure, :key, :one_of])

          if Enum.empty?(single_set) do
            []
          else
            [single_set]
          end

        one_of ->
          # :one_of accepts either a keyword list (a single set) or a
          # list of maps (alternative sets).
          if Keyword.keyword?(one_of) do
            [Enum.into(one_of, %{})]
          else
            one_of
          end
      end

    handler_tuple =
      if handler do
        {handler, :unauthorized}
      else
        # Legacy escape hatch: accept :on_failure with a warning.
        case on_failure do
          {mod, f} ->
            _ = Logger.warn(":on_failure is deprecated. Use :handler")
            {mod, f}

          _ ->
            raise "Requires a handler module to be passed"
        end
      end

    %{
      handler: handler_tuple,
      key: key,
      perm_sets: perm_sets
    }
  end

  @doc false
  # Lets the connection through when the claims satisfy at least one
  # permission set; otherwise (including missing claims) invokes the
  # failure handler.
  def call(conn, opts) do
    key = Map.get(opts, :key)

    case Guardian.Plug.claims(conn, key) do
      {:ok, claims} ->
        if matches_permissions?(claims, Map.get(opts, :perm_sets)) do
          conn
        else
          handle_error(conn, opts)
        end

      {:error, _} ->
        handle_error(conn, opts)
    end
  end

  # No sets configured: everything is permitted.
  defp matches_permissions?(_, []), do: true

  # OR across sets: any single fully-matching set is enough.
  defp matches_permissions?(claims, sets) do
    Enum.any?(sets, &matches_permission_set?(claims, &1))
  end

  # AND within a set: every listed permission must be present in the claims.
  defp matches_permission_set?(claims, set) do
    Enum.all?(set, fn({perm_key, required_perms}) ->
      claims
      |> Guardian.Permissions.from_claims(perm_key)
      |> Guardian.Permissions.all?(required_perms, perm_key)
    end)
  end

  # Halts the pipeline, tags the failure on the conn, and delegates to the
  # configured {module, function} handler with reason: :forbidden.
  defp handle_error(%Plug.Conn{params: params} = conn, opts) do
    conn = conn |> assign(:guardian_failure, :forbidden) |> halt
    params = Map.merge(params, %{reason: :forbidden})
    {mod, meth} = Map.get(opts, :handler)
    apply(mod, meth, [conn, params])
  end
end
|
lib/guardian/plug/ensure_permissions.ex
| 0.674908
| 0.40987
|
ensure_permissions.ex
|
starcoder
|
defmodule Patch do
@moduledoc """
Patch - Ergonomic Mocking for Elixir
Patch makes it easy to mock one or more functions in a module returning a value or executing
custom logic. Patches and Spies allow tests to assert or refute that function calls have been
made.
Using Patch is as easy as adding a single line to your test case.
```elixir
use Patch
```
After this all the patch functions will be available, see the function documentation for
details.
"""
alias Patch.Mock
alias Patch.Mock.Naming
alias Patch.Mock.Value
import Value
require Value
## Exceptions
defmodule ConfigurationError do
defexception [:message]
end
defmodule InvalidAnyCall do
defexception [:message]
end
defmodule MissingCall do
defexception [:message]
end
defmodule UnexpectedCall do
defexception [:message]
end
defmacro __using__(_) do
quote do
require unquote(__MODULE__)
import unquote(__MODULE__)
import Patch.Mock.Value, except: [advance: 1, next: 2]
require Patch.Macro
require Patch.Mock
require Patch.Assertions
setup do
start_supervised!(Patch.Supervisor)
on_exit(fn ->
Patch.Mock.Code.Freezer.empty()
end)
:ok
end
end
end
@doc """
Asserts that the given module and function has been called with any arity.
```elixir
patch(Example, :function, :patch)
assert_any_call Example.function # fails
Example.function(1, 2, 3)
assert_any_call Example.function # passes
```
"""
@spec assert_any_call(call :: Macro.t()) :: Macro.t()
defmacro assert_any_call(call) do
{module, function, arguments} = Macro.decompose_call(call)
unless Enum.empty?(arguments) do
raise InvalidAnyCall, message: "assert_any_call/1 does not support arguments"
end
quote do
Patch.Assertions.assert_any_call(unquote(module), unquote(function))
end
end
@doc """
Asserts that the given module and function has been called with any arity.
```elixir
patch(Example, :function, :patch)
assert_any_call Example, :function # fails
Example.function(1, 2, 3)
assert_any_call Example, :function # passes
```
This function exists for advanced use cases where the module or function are not literals in the
test code. If they are literals then `assert_any_call/1` should be preferred.
"""
@spec assert_any_call(module :: module(), function :: atom()) :: nil
defdelegate assert_any_call(module, function), to: Patch.Assertions
@doc """
Given a call will assert that a matching call was observed by the patched function.
This macro fully supports patterns and will perform non-hygienic binding similar to ExUnit's
`assert_receive/3` and `assert_received/2`.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
assert_called Example.function(1, 2, 3) # passes
assert_called Example.function(1, _, 3) # passes
assert_called Example.function(4, 5, 6) # fails
assert_called Example.function(4, _, 6) # fails
```
"""
@spec assert_called(Macro.t()) :: Macro.t()
defmacro assert_called(call) do
quote do
Patch.Assertions.assert_called(unquote(call))
end
end
@doc """
Given a call will assert that a matching call was observed exactly the number of times provided
by the patched function.
This macro fully supports patterns and will perform non-hygienic binding similar to ExUnit's
`assert_receive/3` and `assert_received/2`. Any binds will bind to the latest matching call
values.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
assert_called Example.function(1, 2, 3), 1 # passes
assert_called Example.function(1, _, 3), 1 # passes
Example.function(1, 2, 3)
assert_called Example.function(1, 2, 3), 2 # passes
assert_called Example.function(1, _, 3), 2 # passes
```
"""
@spec assert_called(call :: Macro.t(), count :: Macro.t()) :: Macro.t()
defmacro assert_called(call, count) do
quote do
Patch.Assertions.assert_called(unquote(call), unquote(count))
end
end
@doc """
Given a call will assert that a matching call was observed exactly once by the patched function.
This macro fully supports patterns and will perform non-hygienic binding similar to ExUnit's
`assert_receive/3` and `assert_received/2`.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
assert_called_once Example.function(1, 2, 3) # passes
assert_called_once Example.function(1, _, 3) # passes
Example.function(1, 2, 3)
assert_called_once Example.function(1, 2, 3) # fails
assert_called_once Example.function(1, _, 3) # fails
```
"""
@spec assert_called_once(call :: Macro.t()) :: Macro.t()
defmacro assert_called_once(call) do
quote do
Patch.Assertions.assert_called_once(unquote(call))
end
end
@doc """
Expose can be used to turn private functions into public functions for the
purpose of testing them.
To expose every private function as a public function, pass the sentinel value `:all`.
```elixir
expose(Example, :all)
```
Otherwise pass a `Keyword.t(arity)` of the functions to expose.
For example, if one wanted to expose `private_function/1` and `private_function/2`.
```elixir
expose(Example, [private_function: 1, private_function: 2])
```
After exposing a function, attempting to call the exposed function will cause the Elixir
Compiler to flag calls to exposed functions as a warning. There is a companion macro
`private/1` that test authors can wrap their calls with to prevent warnings.
"""
@spec expose(module :: module, exposes :: Patch.Mock.exposes()) :: :ok | {:error, term()}
def expose(module, exposes) do
Mock.expose(module, exposes)
end
@doc """
Fakes out a module with an alternative implementation.
The real module can still be accessed with `real/1`.
For example, if your project has the module `Example.Datastore` and there's a fake available in the testing
environment named `Example.Test.InMemoryDatastore` the following table describes which calls are executed by which
code before and after faking with the following call.
```elixir
fake(Example.Datastore, Example.Test.InMemoryDatastore)
```
| Calling Code | Responding Module before fake/2 | Responding Module after fake/2 |
|--------------------------------------|--------------------------------------|--------------------------------------|
| Example.Datastore.get/1 | Example.Datastore.get/1 | Example.Test.InMemoryDatastore.get/1 |
| Example.Test.InMemoryDatastore.get/1 | Example.Test.InMemoryDatastore.get/1 | Example.Test.InMemoryDatastore.get/1 |
| real(Example.Datastore).get/1 | (UndefinedFunctionError) | Example.Datastore.get/1 |
The fake module can use the renamed module to access the original implementation.
"""
@spec fake(real_module :: module(), fake_module :: module()) :: :ok
def fake(real_module, fake_module) do
{:ok, _} = Mock.module(real_module)
real_functions = Patch.Reflection.find_functions(real_module)
fake_functions = Patch.Reflection.find_functions(fake_module)
Enum.each(fake_functions, fn {name, arity} ->
is_real_function? = Enum.any?(real_functions, &match?({^name, ^arity}, &1))
if is_real_function? do
patch(
real_module,
name,
callable(fn args ->
apply(fake_module, name, args)
end, :list)
)
end
end)
end
@doc """
Injects a listener into a value nested inside a running process's state.

Reads the target's state via `:sys.get_state/1`, fetches the value at
`keys`, starts a listener (tagged with `tag`) wrapping that value, and
replaces the nested value with the listener via `replace/3`.

Returns `{:ok, listener}` on success, `{:error, :invalid_keys}` when
`keys` does not resolve inside the target's state, or the error from
`listen/3`.
"""
@spec inject(
        tag :: Patch.Listener.tag(),
        target :: Patch.Listener.target(),
        keys :: [term(), ...],
        options :: [Patch.Listener.option()]
      ) :: {:ok, pid()} | {:error, :not_found} | {:error, :invalid_keys}
def inject(tag, target, keys, options \\ []) do
  state = :sys.get_state(target)

  case Patch.Access.fetch(state, keys) do
    {:ok, subject} ->
      with {:ok, listener} <- listen(tag, subject, options) do
        replace(target, keys, listener)
        {:ok, listener}
      end

    :error ->
      {:error, :invalid_keys}
  end
end
@doc """
Get all the observed calls to a module. These calls are expressed as a `{name, argument}` tuple
and can either be provided in ascending (oldest first) or descending (newest first) order by
providing a sorting of `:asc` or `:desc`, respectively.
```elixir
Example.example(1, 2, 3)
Example.function(:a)
assert history(Example) == [{:example, [1, 2, 3]}, {:function, [:a]}]
assert history(Example, :desc) == [{:function, [:a]}, {:example, [1, 2, 3]}]
```
For asserting or refuting that a call happened the `assert_called/1`, `assert_any_call/2`,
`refute_called/1`, and `refute_any_call/2` functions provide a more convenient API.
"""
@spec history(module :: module(), sorting :: :asc | :desc) :: [Mock.History.entry()]
def history(module, sorting \\ :asc) do
module
|> Mock.history()
|> Mock.History.entries(sorting)
end
@doc """
Starts a listener process.
Each listener should provide a unique `tag` that will be used when forwarding messages to the
test process.
When used on a named process, this is sufficient to begin intercepting all messages to the named
process.
```elixir
listen(:listener, Example)
```
When used on an unnamed process, the process that is spawned will forward any messages to the
caller and target process but any processes holding a reference to the old pid will need to be
updated.
`inject/3` can be used to inject a listener into a running process.
```elixir
{:ok, listener} = listen(:listener, original)
inject(target, :original, listener)
```
"""
@spec listen(
tag :: Patch.Listener.tag(),
target :: Patch.Listener.target(),
options :: [Patch.Listener.option()]
) :: {:ok, pid()} | {:error, :not_found}
def listen(tag, target, options \\ []) do
Patch.Listener.Supervisor.start_child(self(), tag, target, options)
end
@doc """
Patches a function in a module
When called with a function the function will be called instead of the original function and its
results returned.
```elixir
patch(Example, :function, fn arg -> {:mock, arg} end)
assert Example.function(:test) == {:mock, :test}
```
To handle multiple arities create a `callable/2` with the `:list` option and the arguments will
be wrapped to the function in a list.
```elixir
patch(Example, :function, callable(fn
[] ->
:zero
[a] ->
{:one, a}
[a, b] ->
{:two, a, b}
end, :list))
assert Example.function() == :zero
assert Example.function(1) == {:one, 1}
assert Example.function(1, 2) == {:two, 1, 2}
```
To provide a function as a literal value to be returned, use the `scalar/1` function.
```elixir
patch(Example, :function, scalar(fn arg -> {:mock, arg} end))
callable = Example.function()
assert callable.(:test) == {:mock, :test}
```
The function `cycle/1` can be given a list which will be infinitely cycled when the function is
called.
```elixir
patch(Example, :function, cycle([1, 2, 3]))
assert Example.function() == 1
assert Example.function() == 2
assert Example.function() == 3
assert Example.function() == 1
assert Example.function() == 2
assert Example.function() == 3
assert Example.function() == 1
```
The function `raises/1` can be used to `raise/1` a `RuntimeError` when the function is called.
```elixir
patch(Example, :function, raises("patched"))
assert_raise RuntimeError, "patched", fn ->
Example.function()
end
```
The function `raises/2` can be used to `raise/2` any exception with any attributes when the function
is called.
```elixir
patch(Example, :function, raises(ArgumentError, message: "patched"))
assert_raise ArgumentError, "patched", fn ->
Example.function()
end
```
The function `sequence/1` can be given a list which will be used until a singal value is
remaining, the remaining value will be returned on all subsequent calls.
```elixir
patch(Example, :function, sequence([1, 2, 3]))
assert Example.function() == 1
assert Example.function() == 2
assert Example.function() == 3
assert Example.function() == 3
assert Example.function() == 3
assert Example.function() == 3
assert Example.function() == 3
```
The function `throws/1` can be given a value to `throw/1` when the function is called.
```elixir
patch(Example, :function, throws(:patched))
assert catch_throw(Example.function()) == :patched
```
Any other value will be returned as a literal scalar value when the function is called.
```elixir
patch(Example, :function, :patched)
assert Example.function() == :patched
```
"""
@spec patch(module :: module(), function :: atom(), value :: Value.t()) :: Value.t()
def patch(module, function, %value_module{} = value) when is_value(value_module) do
{:ok, _} = Patch.Mock.module(module)
:ok = Patch.Mock.register(module, function, value)
value
end
@spec patch(module :: module(), function :: atom(), callable) :: callable when callable: function()
def patch(module, function, callable) when is_function(callable) do
patch(module, function, callable(callable))
callable
end
@spec patch(module :: module(), function :: atom(), return_value) :: return_value
when return_value: term()
def patch(module, function, return_value) do
patch(module, function, scalar(return_value))
return_value
end
@doc """
Suppress warnings for using exposed private functions in tests.
Patch allows you to make a private function public via the `expose/2` function. Exposure
happens dynamically at test time. The Elixir Compiler will flag calls to exposed functions as a
warning.
One way around this is to change the normal function call into an `apply/3` but this is
cumbersome and makes tests harder to read.
This macro just rewrites a normal looking call into an `apply/3` so the compiler won't complain
about calling an exposed function.
```elixir
expose(Example, :all)
patch(Example, :private_function, :patched)
assert Example.private_function() == :patched # Compiler will warn about call to undefined function
assert apply(Example, :private_function, []) == :patched # Compiler will not warn
assert private(Example.private_function()) == :patched # Same as previous line, but looks nicer.
```
"""
@spec private(Macro.t()) :: Macro.t()
defmacro private(call) do
{module, function, arguments} = Macro.decompose_call(call)
quote do
apply(unquote(module), unquote(function), unquote(arguments))
end
end
@doc """
Gets the real module name for a fake.
This is useful for Fakes that want to defer some part of the functionality back to the real
module.
```elixir
def Example do
def calculate(a) do
# ...snip some complex calculations...
result
end
end
def Example.Fake do
import Patch, only: [real: 1]
def calculate(a) do
real_result = real(Example).calculate(a)
{:fake, real_result}
end
end
"""
@spec real(module :: module()) :: module()
def real(module) do
Naming.original(module)
end
@doc """
Refutes that the given module and function has been called with any arity.
```elixir
patch(Example, :function, :patch)
refute_any_call Example.function # passes
Example.function(1, 2, 3)
refute_any_call Example.function # fails
```
"""
@spec refute_any_call(call :: Macro.t()) :: Macro.t()
defmacro refute_any_call(call) do
{module, function, arguments} = Macro.decompose_call(call)
unless Enum.empty?(arguments) do
raise InvalidAnyCall, message: "refute_any_call/1 does not support arguments"
end
quote do
Patch.Assertions.refute_any_call(unquote(module), unquote(function))
end
end
@doc """
Refutes that the given module and function has been called with any arity.
```elixir
patch(Example, :function, :patch)
refute_any_call Example, :function # passes
Example.function(1, 2, 3)
refute_any_call Example, :function # fails
```
This function exists for advanced use cases where the module or function are not literals in the
test code. If they are literals then `refute_any_call/1` should be preferred.
"""
@spec refute_any_call(module :: module(), function :: atom()) :: nil
defdelegate refute_any_call(module, function), to: Patch.Assertions
@doc """
Given a call will refute that a matching call was observed by the patched function.
This macro fully supports patterns.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
refute_called Example.function(4, 5, 6) # passes
refute_called Example.function(4, _, 6) # passes
refute_called Example.function(1, 2, 3) # fails
refute_called Example.function(1, _, 3) # fails
```
"""
@spec refute_called(call :: Macro.t()) :: Macro.t()
defmacro refute_called(call) do
quote do
Patch.Assertions.refute_called(unquote(call))
end
end
@doc """
Given a call will refute that a matching call was observed exactly the number of times provided
by the patched function.
This macro fully supports patterns.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
refute_called Example.function(1, 2, 3), 2 # passes
refute_called Example.function(1, _, 3), 2 # passes
Example.function(1, 2, 3)
refute_called Example.function(1, 2, 3), 1 # passes
refute_called Example.function(1, _, 3), 1 # passes
```
"""
@spec refute_called(call :: Macro.t(), count :: Macro.t()) :: Macro.t()
defmacro refute_called(call, count) do
quote do
Patch.Assertions.refute_called(unquote(call), unquote(count))
end
end
@doc """
Given a call will refute that a matching call was observed exactly once by the patched function.
This macro fully supports patterns.
```elixir
patch(Example, :function, :patch)
Example.function(1, 2, 3)
refute_called_once Example.function(1, 2, 3) # fails
refute_called_once Example.function(1, _, 3) # fails
Example.function(1, 2, 3)
refute_called_once Example.function(1, 2, 3) # passes
refute_called_once Example.function(1, _, 3) # passes
```
"""
@spec refute_called_once(call :: Macro.t()) :: Macro.t()
defmacro refute_called_once(call) do
quote do
Patch.Assertions.refute_called_once(unquote(call))
end
end
@doc """
Convenience function for replacing part of the state of a running process.
Uses the `Access` module to traverse the state structure according to the given `keys`.
Structs have special handling so that they can be updated without having to implement the
`Access` behavior.
For example to replace the key `:key` in the map found under the key `:map` with the value
`:replaced`
```elixir
replace(target, [:map, :key], :replaced)
```
"""
@spec replace(target :: GenServer.server(), keys :: [term(), ...], value :: term()) :: term()
def replace(target, keys, value) do
:sys.replace_state(target, &Patch.Access.put(&1, keys, value))
end
@doc """
Remove any mocks or spies from the given module
```elixir
original = Example.example()
patch(Example, :example, :patched)
assert Example.example() == :patched
restore(Example)
assert Example.example() == original
```
"""
@spec restore(module :: module()) :: :ok | {:error, term()}
def restore(module) do
Mock.restore(module)
end
@doc """
Remove any patches associated with a function in a module.
```elixir
original = Example.example()
patch(Example, :example, :example_patch)
patch(Example, :other, :other_patch)
assert Example.example() == :example_patch
assert Example.other() == :other_patch
restore(Example, :example)
assert Example.example() == original
assert Example.other() == :other_patch
"""
@spec restore(module :: module(), name :: atom()) :: :ok | {:error, term()}
def restore(module, name) do
Mock.restore(module, name)
end
@doc """
Spies on the provided module
Once a module has been spied on the calls to that module can be asserted / refuted without
changing the behavior of the module.
```elixir
spy(Example)
Example.example(1, 2, 3)
assert_called Example.example(1, 2, 3) # passes
"""
@spec spy(module :: module()) :: :ok
def spy(module) do
{:ok, _} = Mock.module(module)
:ok
end
end
|
lib/patch.ex
| 0.861494
| 0.869936
|
patch.ex
|
starcoder
|
defmodule Millisecond do
@moduledoc """
A tiny library to parse human readable formats into milliseconds.
"""
@doc """
A tiny library to parse human readable formats into milliseconds to
make configurations easier.
## Examples
iex> Millisecond.ms("100")
{:ok, 100}
iex> Millisecond.ms("1s")
{:ok, 1_000}
iex> Millisecond.ms!("1s")
1_000
iex> Millisecond.ms("1m")
{:ok, 60_000}
iex> Millisecond.ms("1.5m")
{:ok, 9.0e4}
iex> Millisecond.ms("-0.5m")
{:ok, -3.0e4}
iex> Millisecond.ms("1h")
{:ok, 3_600_000}
iex> Millisecond.ms("1h 1m 1s")
{:ok, 3_661_000}
iex> Millisecond.ms("1y 1mo 1d")
{:ok, 34_236_000_000}
iex> Millisecond.ms("RANDOM STRING")
:error
iex> Millisecond.ms!("1nvalid")
** (ArgumentError) Format is invalid: \"1nvalid\"
iex> Millisecond.ms("1d 1mo 1y")
:error
iex> Millisecond.ms("1hour 1minute 1second")
{:ok, 3_661_000}
iex> Millisecond.ms("1 minutes 1 milliseconds")
{:ok, 60001}
iex> Millisecond.ms("1year 1month 1day")
{:ok, 34_236_000_000}
"""
@second_ms 1000
@minute_ms 60 * @second_ms
@hour_ms 60 * @minute_ms
@day_ms 24 * @hour_ms
@week_ms 7 * @day_ms
@month_ms 30 * @day_ms
@year_ms round(365.25 * @day_ms)
@steps [
:year,
:month,
:week,
:day,
:hour,
:minute,
:second,
:millisecond
]
@type t :: %__MODULE__{
year: number(),
month: number(),
week: number(),
day: number(),
hour: number(),
minute: number(),
second: number(),
millisecond: number()
}
@type millisecond :: pos_integer()
defstruct @steps
@doc """
Converts a string format into milliseconds.
The main function of this library.
## Example
iex> import Millisecond, only: [ms: 1, ms!: 1]
iex> ms!('2 days')
172800000
iex> ms!('1d')
86400000
iex> ms!('10h')
36000000
iex> ms!('2.5 hrs')
9000000
iex> ms!('2h')
7200000
iex> ms!('1m')
60000
iex> ms!('5s')
5000
iex> ms!('1y')
31557600000
iex> ms!('100')
100
iex> ms!('-3 days')
-259200000
iex> ms!('-1h')
-3600000
iex> ms!('-200')
-200
"""
@spec ms(charlist) :: {:ok, millisecond} | :error
def ms(text) do
case parse(text) do
{:ok, data} ->
{:ok, to_milliseconds(data)}
:error ->
:error
end
end
@doc """
This is `ms/1` but returns milliseconds directly on success and raises
an error otherwise.
"""
@spec ms!(charlist) :: millisecond
def ms!(text) do
text
|> parse!()
|> to_milliseconds()
end
@doc """
Converts a string to a `Millisecond` struct.
This is intended to be an low-level function to mainly separate
parsing and conversion.
## Examples
iex> Millisecond.parse("1h 1m 1s")
{:ok, %Millisecond{hour: 1, minute: 1, second: 1}}
iex> Millisecond.parse("invalid format")
:error
"""
@spec parse(charlist()) :: {:ok, t()} | :error
def parse(text) when is_binary(text) do
text
|> String.trim()
|> String.downcase()
|> String.split(" ", trim: true, parts: 3 * length(@steps))
|> Enum.map(&parse_number/1)
|> (fn units ->
case units do
[{:error, _}] ->
:error
[{_, _}] ->
do_process(units)
units ->
units
|> Enum.reduce_while({nil, []}, fn res, {quantity, grouped_units} ->
case res do
{:error, _} when is_nil(quantity) ->
{:halt, :error}
{:error, unit} ->
{:cont, {nil, [{quantity, unit} | grouped_units]}}
{new_quantity, ""} when is_nil(quantity) ->
{:cont, {new_quantity, grouped_units}}
{_, ""} ->
{:halt, :error}
unit ->
{:cont, {nil, [unit | grouped_units]}}
end
end)
|> case do
:error ->
:error
{nil, grouped_units} ->
grouped_units
|> Enum.reverse()
|> do_process()
{_, _} ->
:error
end
end
end).()
end
def parse(_), do: :error
@doc """
This is `parse/1` but returns the data directly on success and raises
an error otherwise.
"""
@spec parse!(charlist) :: t()
def parse!(text) do
case parse(text) do
{:ok, mil1iseconds} -> mil1iseconds
:error -> raise ArgumentError, "Format is invalid: #{inspect(text)}"
end
end
# Parses the leading number of a token: floats when a "." is present,
# integers otherwise. Returns {number, rest_of_token} on success or
# {:error, original_token} when the token does not start with a number.
defp parse_number(text) do
  parser =
    if String.contains?(text, "."),
      do: &Float.parse/1,
      else: &Integer.parse/1

  case parser.(text) do
    :error -> {:error, text}
    result -> result
  end
end
# Consumes {quantity, unit} pairs against the ordered @steps. Units must
# appear in descending magnitude (year before month before week, ...);
# succeeds only when every pair has been matched to a step.
defp do_process([]), do: :error

defp do_process(values) do
  @steps
  |> Enum.reduce_while({%Millisecond{}, values}, fn step, acc ->
    with {state, [value | remaining_values]} <- acc,
         {quantity, unit} <- value,
         true <- do_parse_unit(step, unit) do
      {:cont, {Map.put(state, step, quantity), remaining_values}}
    else
      # Unit does not belong to this step: try the next (smaller) step.
      false -> {:cont, acc}
      # No values left or a malformed value: stop scanning.
      _ -> {:halt, acc}
    end
  end)
  |> case do
    # All pairs consumed: success.
    {state, []} ->
      {:ok, state}

    # Leftover pairs mean out-of-order or unrecognized units.
    _ ->
      :error
  end
end
# Unit-name recognizers: one clause per step, returning true when `unit`
# is an accepted spelling for that step. Note "m" means minutes and "mo"
# means months.
@year_units ["years", "year", "yrs", "yr", "y"]
defp do_parse_unit(:year, unit) when unit in @year_units, do: true
@month_units ["months", "month", "mo"]
defp do_parse_unit(:month, unit) when unit in @month_units, do: true
@week_units ["weeks", "week", "w"]
defp do_parse_unit(:week, unit) when unit in @week_units, do: true
@day_units ["days", "day", "d"]
defp do_parse_unit(:day, unit) when unit in @day_units, do: true
@hour_units ["hours", "hour", "hrs", "hr", "h"]
defp do_parse_unit(:hour, unit) when unit in @hour_units, do: true
@minute_units ["minutes", "minute", "mins", "min", "m"]
defp do_parse_unit(:minute, unit) when unit in @minute_units,
  do: true
@second_units ["seconds", "second", "secs", "sec", "s"]
defp do_parse_unit(:second, unit) when unit in @second_units, do: true
@millisecond_units ["milliseconds", "millisecond", "msecs", "msec", "ms"]
defp do_parse_unit(:millisecond, unit) when unit in @millisecond_units, do: true
# A bare number (empty unit) is treated as milliseconds.
defp do_parse_unit(:millisecond, ""), do: true
defp do_parse_unit(_, _), do: false
@doc """
Converts a `Millisecond` struct to the intended milliseconds format.
This is intended to be an low-level function to mainly separate
parsing and conversion.
## Examples
iex> data = Millisecond.parse!("1h 1m 1s")
iex> Millisecond.to_milliseconds(data)
3_661_000
iex> data = Millisecond.parse!("1y 1mo 1d")
iex> Millisecond.to_milliseconds(data)
34_236_000_000
"""
@spec to_milliseconds(t()) :: millisecond()
def to_milliseconds(%Millisecond{} = data) do
@steps
|> Enum.reduce(0, fn step, acc ->
if multiplier = Map.get(data, step, nil) do
value =
case step do
:year -> @year_ms
:month -> @month_ms
:week -> @week_ms
:day -> @day_ms
:hour -> @hour_ms
:minute -> @minute_ms
:second -> @second_ms
:millisecond -> 1
end
acc + multiplier * value
else
acc
end
end)
end
@doc """
Adds the `Millisecond` to a `DateTime` to produce a future datetime.

This is an example of its intended use for configuration.

## Examples

    iex> ms = Millisecond.parse!("100ms")
    iex> now = DateTime.utc_now()
    iex> now |> Millisecond.add(ms) |> DateTime.diff(now, :millisecond)
    100

"""
@spec add(DateTime.t(), t()) :: DateTime.t()
def add(%DateTime{} = datetime, %Millisecond{} = data) do
  DateTime.add(datetime, to_milliseconds(data), :millisecond)
end
@doc """
Subtracts the `Millisecond` from a `DateTime` to produce a past datetime.

This is an example of its intended use for configuration.

## Examples

    iex> ms = Millisecond.parse!("100ms")
    iex> now = DateTime.utc_now()
    iex> now |> Millisecond.subtract(ms) |> DateTime.diff(now, :millisecond)
    -100

"""
@spec subtract(DateTime.t(), t()) :: DateTime.t()
def subtract(%DateTime{} = datetime, %Millisecond{} = data) do
  DateTime.add(datetime, -to_milliseconds(data), :millisecond)
end
if Code.ensure_compiled(Timex) == {:module, Timex} do
  @doc """
  Converts a `Millisecond` struct into a `Timex.Duration` struct.

  This is intended to show that `Millisecond` can be converted into a
  more appropriate date/time struct such as `Timex.Duration`. If you have
  `Timex` already, perhaps use `Timex.Duration.parse/1` as it uses the
  ISO 8601 Duration format.

  ## Examples

      iex> {:ok, data} = Millisecond.parse("1h 1s")
      iex> duration = Millisecond.to_duration(data)
      iex> Timex.Duration.to_string(duration)
      "PT1H1S"

      iex> {:ok, data} = Millisecond.parse("1d 1h")
      iex> {:ok, duration} = Timex.Duration.parse("P1DT1H")
      iex> Millisecond.to_duration(data) == duration
      true

  """
  @spec to_duration(t()) :: Timex.Duration.t()
  def to_duration(data) do
    alias Timex.Duration

    # Accumulate one Duration per populated unit field; nil fields are skipped.
    Enum.reduce(@steps, Duration.zero(), fn step, total ->
      case Map.get(data, step) do
        nil -> total
        amount -> Duration.add(total, step_duration(step, amount))
      end
    end)
  end

  # Builds a `Timex.Duration` for `amount` units of `step`.
  # NOTE(review): years/months use 365/30-day approximations here, which may
  # differ from the `@year_ms`/`@month_ms` factors used by
  # `to_milliseconds/1` — confirm the intended approximation.
  defp step_duration(:year, n), do: Timex.Duration.from_days(365 * n)
  defp step_duration(:month, n), do: Timex.Duration.from_days(30 * n)
  defp step_duration(:week, n), do: Timex.Duration.from_days(7 * n)
  defp step_duration(:day, n), do: Timex.Duration.from_days(n)
  defp step_duration(:hour, n), do: Timex.Duration.from_hours(n)
  defp step_duration(:minute, n), do: Timex.Duration.from_minutes(n)
  defp step_duration(:second, n), do: Timex.Duration.from_seconds(n)
  defp step_duration(:millisecond, n), do: Timex.Duration.from_milliseconds(n)
end
end
|
lib/ex_ms.ex
| 0.928449
| 0.400456
|
ex_ms.ex
|
starcoder
|
defmodule Lapin.Connection do
  @moduledoc """
  RabbitMQ connection handler

  This module handles the RabbitMQ connection. It also provides a behaviour for
  worker module implementation. The worker module should use the `Lapin.Connection`
  behaviour and implement the callbacks it needs.

  When using the `Lapin.Connection` behaviour a `publish/4` function is injected in
  the worker module as a shortcut to the `Lapin.Connection.publish/5` function
  which removes the need for passing in the connection and is publicly callable
  to publish messages on the connection configured for the implementing module.
  """

  use GenServer
  use AMQP

  require Logger

  import Lapin.Utils, only: [check_mandatory_params: 2]

  alias Lapin.{Message, Channel}

  @typedoc """
  Connection configuration

  The following keys are supported:
  - module: module using the `Lapin.Connection` behaviour
  - host: broker hostname (string | charlist), *default: 'localhost'*
  - port: broker port (string | integer), *default: 5672*
  - virtual_host: broker vhost (string), *default: ""*
  - username: username (string)
  - password: password (string)
  - auth_mechanisms: broker auth_mechanisms ([:amqplain | :external | :plain]), *default: amqp_client default*
  - ssl_options: ssl options ([:ssl:ssl_option]), *default: none*
  - channels: channels to configure ([Channel.config]), *default: []*
  """
  @type config :: [channels: [Channel.config]]

  @typedoc "Connection"
  @type t :: GenServer.server

  @typedoc "Callback result"
  @type on_callback :: :ok | {:error, message :: String.t}

  @typedoc "Reason for message rejection"
  @type reason :: term

  @typedoc "`handle_deliver/2` callback result"
  @type on_deliver :: :ok | {:reject, reason} | term

  @doc """
  Called when receiving a `basic.cancel` from the broker.
  """
  @callback handle_cancel(Channel.t) :: on_callback

  @doc """
  Called when receiving a `basic.cancel_ok` from the broker.
  """
  @callback handle_cancel_ok(Channel.t) :: on_callback

  @doc """
  Called when receiving a `basic.consume_ok` from the broker.

  This signals successful registration as a consumer.
  """
  @callback handle_consume_ok(Channel.t) :: on_callback

  @doc """
  Called when receiving a `basic.deliver` from the broker.

  Return values from this callback determine message acknowledgement:
  - `:ok`: Message was processed by the consumer and should be removed from queue
  - `{:reject, reason}`: Message was not processed and should be rejected

  Any other return value requeues the message to prevent data loss.
  A crash in the callback code will however reject the message to prevent loops
  if the message was already delivered before.

  The `reason` term can be used by the application
  to signal the reason of rejection and is logged in debug.
  """
  @callback handle_deliver(Channel.t, Message.t) :: on_deliver

  @doc """
  Called when completing a `basic.publish` with the broker.

  Message transmission to the broker is successful when this callback is called.
  """
  @callback handle_publish(Channel.t, Message.t) :: on_callback

  @doc """
  Called when receiving a `basic.return` from the broker.

  This signals an undeliverable returned message from the broker.
  """
  @callback handle_return(Channel.t, Message.t) :: on_callback

  @doc """
  Called before `handle_deliver/2` to get the payload type.

  Should return a data type instance to decode the payload into.
  A `Lapin.Message.Payload` implementation must be provided for this type. The
  default implementation leaves the payload unaltered.
  """
  @callback payload_for(Channel.t, Message.t) :: Message.Payload.t

  defmacro __using__(_) do
    quote do
      alias Lapin.{Channel, Message}

      @behaviour Lapin.Connection

      # Overridable no-op defaults for every behaviour callback.
      def handle_cancel(_channel), do: :ok
      def handle_cancel_ok(_channel), do: :ok
      def handle_consume_ok(_channel), do: :ok
      def handle_deliver(_channel, _message), do: :ok
      def handle_publish(_channel, _message), do: :ok
      def handle_return(_channel, _message), do: :ok
      def payload_for(_channel, _message), do: <<>>

      defoverridable Lapin.Connection

      # Convenience wrapper: publishes on the connection registered under the
      # implementing module's name.
      def publish(exchange, routing_key, message, options \\ []) do
        Lapin.Connection.publish(__MODULE__, exchange, routing_key, message, options)
      end
    end
  end

  @default_reconnection_delay 5_000
  # FIX: this key was previously misspelled as `connecion_timeout`, so the
  # intended connection timeout default was silently ignored by the client.
  @connection_default_params [connection_timeout: @default_reconnection_delay]
  @default_rabbitmq_host 'localhost'
  @default_rabbitmq_port 5672

  @doc """
  Starts a `Lapin.Connection` with the specified configuration
  """
  @spec start_link(config, options :: GenServer.options) :: GenServer.on_start
  def start_link(configuration, options \\ []) do
    {:ok, configuration} = cleanup_configuration(configuration)
    GenServer.start_link(__MODULE__, configuration, options)
  end

  def init(configuration) do
    # NOTE(review): connect/1 retries forever with a fixed delay, so init/1
    # blocks the starting process until the broker is reachable.
    {:ok, module, connection, channels} = connect(configuration)
    {:ok, %{channels: channels, connection: connection, module: module, configuration: configuration}}
  end

  @doc """
  Closes the connection
  """
  @spec close(connection :: t) :: :ok
  def close(connection), do: GenServer.stop(connection)

  # The AMQP connection is closed on server shutdown (see close/1).
  def terminate(_reason, %{connection: connection}) do
    Connection.close(connection)
  end

  @doc """
  Publishes a message to the specified exchange with the given routing_key
  """
  @spec publish(connection :: t, Channel.exchange, Channel.routing_key, Message.Payload.t, options :: Keyword.t) :: on_callback
  def publish(connection, exchange, routing_key, payload, options \\ []) do
    GenServer.call(connection, {:publish, exchange, routing_key, payload, options})
  end

  def handle_call({:publish, exchange, routing_key, payload, options}, _from, %{channels: channels, module: module} = state) do
    with channel when not is_nil(channel) <- Channel.get(channels, exchange, routing_key, :producer),
         %Channel{pattern: pattern} <- channel,
         amqp_channel when not is_nil(amqp_channel) <- channel.amqp_channel,
         mandatory <- pattern.publisher_mandatory(channel),
         persistent <- pattern.publisher_persistent(channel),
         options <- Keyword.merge([mandatory: mandatory, persistent: persistent], options),
         content_type <- Message.Payload.content_type(payload),
         meta <- %{content_type: content_type},
         {:ok, payload} <- Message.Payload.encode(payload),
         :ok <- Basic.publish(amqp_channel, exchange, routing_key, payload, options) do
      message = %Message{meta: Enum.into(options, meta), payload: payload}

      # When the channel pattern requires publisher confirms, wait for the
      # broker to confirm before reporting success to the worker module.
      if not pattern.publisher_confirm(channel) or Confirm.wait_for_confirms(amqp_channel) do
        Logger.debug fn -> "Published #{inspect message} on #{inspect channel}" end
        {:reply, module.handle_publish(channel, message), state}
      else
        error = "Error publishing #{inspect message}"
        Logger.debug fn -> error end
        {:reply, {:error, error}, state}
      end
    else
      :passive ->
        error = "Cannot publish, channel role is :passive"
        Logger.error error
        {:reply, {:error, error}, state}
      :consumer ->
        error = "Cannot publish, channel role is :consumer"
        Logger.error error
        {:reply, {:error, error}, state}
      nil ->
        error = "Error publishing message: no channel for exchange '#{exchange}' with routing key '#{routing_key}'"
        Logger.debug fn -> error end
        {:reply, {:error, error}, state}
      {:error, error} ->
        Logger.debug fn -> "Error sending message: #{inspect error}" end
        {:reply, {:error, error}, state}
    end
  end

  def handle_info({:basic_cancel, %{consumer_tag: consumer_tag}}, %{channels: channels, module: module} = state) do
    with channel when not is_nil(channel) <- Channel.get(channels, consumer_tag) do
      Logger.debug fn -> "Broker cancelled consumer for #{inspect channel}" end
      module.handle_cancel(channel)
    else
      nil ->
        Logger.warn "Broker cancelled consumer_tag '#{consumer_tag}' for locally unknown channel"
      {:error, error} ->
        Logger.error "Error canceling consumer_tag '#{consumer_tag}': #{error}"
    end

    # A broker-initiated cancel stops the server; a supervisor is expected to
    # restart it with a fresh connection.
    {:stop, :normal, state}
  end

  def handle_info({:basic_cancel_ok, %{consumer_tag: consumer_tag}}, %{channels: channels, module: module} = state) do
    with channel when not is_nil(channel) <- Channel.get(channels, consumer_tag),
         :ok <- module.handle_cancel_ok(channel) do
      Logger.debug fn -> "Broker confirmed cancelling consumer for #{inspect channel}" end
    else
      nil ->
        Logger.debug fn -> "Broker confirmed cancelling consumer for locally unknown tag '#{consumer_tag}'" end
      error ->
        Logger.error "Error handling broker cancel for '#{consumer_tag}': #{inspect error}"
    end

    {:noreply, state}
  end

  def handle_info({:basic_consume_ok, %{consumer_tag: consumer_tag}}, %{channels: channels, module: module} = state) do
    with channel when not is_nil(channel) <- Channel.get(channels, consumer_tag),
         :ok <- module.handle_consume_ok(channel) do
      Logger.debug fn -> "Broker registered consumer for #{inspect channel}" end
    else
      nil ->
        Logger.warn "Broker registered consumer_tag '#{consumer_tag}' for locally unknown channel"
      error ->
        Logger.error "Error handling broker register for '#{consumer_tag}': #{inspect error}"
    end

    {:noreply, state}
  end

  def handle_info({:basic_return, payload, %{exchange: exchange, routing_key: routing_key} = meta}, %{channels: channels, module: module} = state) do
    message = %Message{meta: meta, payload: payload}

    with channel when not is_nil(channel) <- Channel.get(channels, exchange, routing_key, :producer),
         :ok <- module.handle_return(channel, message) do
      Logger.debug fn -> "Broker returned message #{inspect message}" end
    else
      nil ->
        Logger.warn "Broker returned message #{inspect message} for locally unknown channel"
      error ->
        Logger.debug fn -> "Error handling returned message: #{inspect error}" end
    end

    {:noreply, state}
  end

  def handle_info({:DOWN, _, :process, _pid, _reason}, state) do
    # The monitored AMQP connection process died; stop so the supervisor can
    # restart the server and reconnect.
    Logger.warn "Connection down, restarting..."
    {:stop, :normal, state}
  end

  def handle_info({:basic_deliver, payload, %{consumer_tag: consumer_tag} = meta}, %{channels: channels, module: module} = state) do
    message = %Message{meta: meta, payload: payload}

    with channel when not is_nil(channel) <- Channel.get(channels, consumer_tag) do
      # Consume in a separate process so a slow or crashing worker callback
      # does not block or kill the connection server.
      spawn(fn -> consume(module, channel, meta, payload) end)
    else
      nil ->
        Logger.error "Error processing message #{inspect message}, no local channel"
    end

    {:noreply, state}
  end

  # Decodes the payload and hands the message to the worker module, then
  # acks/rejects according to the callback result. A crash in the callback
  # rejects the message, requeueing it only on first delivery.
  defp consume(module, %Channel{pattern: pattern} = channel, %{delivery_tag: delivery_tag, redelivered: redelivered} = meta, payload) do
    message = %Message{meta: meta, payload: payload}

    with consumer_ack <- pattern.consumer_ack(channel),
         payload_for <- module.payload_for(channel, message),
         content_type <- Message.Payload.content_type(payload_for),
         message <- %Message{message | meta: Map.put(meta, :content_type, content_type)},
         {:ok, payload} <- Message.Payload.decode_into(payload_for, payload),
         message <- %Message{message | payload: payload},
         :ok <- module.handle_deliver(channel, message) do
      Logger.debug fn -> "Consuming message #{delivery_tag}" end
      consume_ack(consumer_ack, channel.amqp_channel, delivery_tag)
    else
      {:reject, reason} ->
        Basic.reject(channel.amqp_channel, delivery_tag, requeue: false)
        Logger.debug fn -> "Rejected message #{delivery_tag}: #{inspect reason}" end
      reason ->
        Basic.reject(channel.amqp_channel, delivery_tag, requeue: not redelivered)
        Logger.debug fn -> "Requeued message #{delivery_tag}: #{inspect reason}" end
    end
  rescue
    exception ->
      Basic.reject(channel.amqp_channel, delivery_tag, requeue: not redelivered)
      Logger.error "Rejected message #{delivery_tag}: #{inspect exception}"
  end

  defp consume_ack(true = _consumer_ack, amqp_channel, delivery_tag) do
    if Basic.ack(amqp_channel, delivery_tag) do
      Logger.debug fn -> "Consumed message #{delivery_tag} successfully, ACK sent" end
      :ok
    else
      Logger.debug fn -> "ACK failed for message #{delivery_tag}" end
      :error
    end
  end

  defp consume_ack(false = _consumer_ack, _amqp_channel, delivery_tag) do
    Logger.debug fn -> "Consumed message #{delivery_tag}, ACK not required" end
    :ok
  end

  # Opens the AMQP connection, retrying forever with a fixed delay on failure.
  defp connect(configuration) do
    with {module, configuration} <- Keyword.pop(configuration, :module),
         {channels, configuration} <- Keyword.pop(configuration, :channels, []),
         configuration <- Keyword.merge(@connection_default_params, configuration),
         {:ok, connection} <- Connection.open(configuration) do
      Process.monitor(connection.pid)
      {:ok, module, connection, Enum.map(channels, &Channel.create(connection, &1))}
    else
      {:error, _} ->
        :timer.sleep(@default_reconnection_delay)
        connect(configuration)
    end
  end

  # Validates mandatory params and normalizes host, port and auth mechanisms
  # into the forms the underlying client expects.
  defp cleanup_configuration(configuration) do
    with :ok <- check_mandatory_params(configuration, [:module]),
         {_, configuration} <- Keyword.get_and_update(configuration, :host, fn host ->
           {host, map_host(host)}
         end),
         {_, configuration} <- Keyword.get_and_update(configuration, :port, fn port ->
           {port, map_port(port)}
         end),
         # Consistency fix: use `<-` like the other steps (get_and_update/3
         # always returns a 2-tuple, so behavior is unchanged).
         {_, configuration} <- Keyword.get_and_update(configuration, :auth_mechanisms, fn
           mechanisms when is_list(mechanisms) ->
             {mechanisms, Enum.map(mechanisms, &map_auth_mechanism(&1))}
           _ ->
             :pop
         end) do
      {:ok, configuration}
    else
      {:error, :missing_params, missing_params} ->
        params = Enum.join(missing_params, ", ")
        error = "Error creating connection #{inspect configuration}: missing mandatory params: #{params}"
        Logger.error error
        {:error, error}
    end
  end

  defp map_auth_mechanism(:amqplain), do: &:amqp_auth_mechanisms.amqplain/3
  defp map_auth_mechanism(:external), do: &:amqp_auth_mechanisms.external/3
  defp map_auth_mechanism(:plain), do: &:amqp_auth_mechanisms.plain/3
  defp map_auth_mechanism(auth_mechanism), do: auth_mechanism

  defp map_host(nil), do: @default_rabbitmq_host
  defp map_host(host) when is_binary(host), do: String.to_charlist(host)
  defp map_host(host), do: host

  defp map_port(nil), do: @default_rabbitmq_port
  defp map_port(port) when is_binary(port), do: String.to_integer(port)
  defp map_port(port), do: port
end
|
lib/lapin/connection.ex
| 0.890705
| 0.435061
|
connection.ex
|
starcoder
|
defmodule Ecto.Migration do
@moduledoc """
Migrations are used to modify your database schema over time.
This module provides many helpers for migrating the database,
allowing developers to use Elixir to alter their storage in
a way that is database independent.
Here is an example:
defmodule MyRepo.Migrations.AddWeatherTable do
use Ecto.Migration
def up do
create table("weather") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps()
end
end
def down do
drop table("weather")
end
end
Note that migrations have `up/0` and `down/0` instructions, where
`up/0` is used to update your database and `down/0` rolls back
the prompted changes.
Ecto provides some mix tasks to help developers work with migrations:
* `mix ecto.gen.migration add_weather_table` - generates a
migration that the user can fill in with particular commands
* `mix ecto.migrate` - migrates a repository
* `mix ecto.rollback` - rolls back a particular migration
Run the `mix help COMMAND` for more information.
## Change
Migrations can also be automatically reversible by implementing
`change/0` instead of `up/0` and `down/0`. For example, the
migration above can be written as:
defmodule MyRepo.Migrations.AddWeatherTable do
use Ecto.Migration
def change do
create table("weather") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps()
end
end
end
Notice not all commands are reversible though. Trying to rollback
a non-reversible command will raise an `Ecto.MigrationError`.
## Field Types
The Ecto primitive types are mapped to the appropriate database
type by the various database adapters. For example, `:string` is converted to
`:varchar`, `:binary` to `:bits` or `:blob`, and so on.
Similarly, you can pass any field type supported by your database
as long as it maps to an Ecto type. For instance, you can use `:text`,
`:varchar` or `:char` in your migrations as `add :field_name, :text`.
In your Ecto schema, they will all map to the same `:string` type.
Remember, atoms can contain arbitrary characters by enclosing in
double quotes the characters following the colon. So, if you want to use
field type with your database specific options, you can pass atoms containing
these options like `:"int unsigned"`, `:"time without time zone"`.
## Prefixes
Migrations support specifying a table prefix or index prefix which will
target either a schema if using Postgres, or a different database if using
MySQL. If no prefix is provided, the default schema or database is used.
Any reference declared in the table migration refers by default to the table
with the same declared prefix. The prefix is specified in the table options:
def up do
create table("weather", prefix: "north_america") do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
add :group_id, references(:groups)
timestamps()
end
create index("weather", [:city], prefix: "north_america")
end
Note: if using MySQL with a prefixed table, you must use the same prefix
for the references since cross database references are not supported.
For both MySQL and Postgres with a prefixed table, you must use the same
prefix for the index field to ensure you index the prefix qualified table.
## Transactions
By default, Ecto runs all migrations inside a transaction. That's not always
ideal: for example, PostgreSQL allows creating/dropping indexes concurrently but
only outside of any transaction (see the [PostgreSQL
docs](http://www.postgresql.org/docs/9.2/static/sql-createindex.html#SQL-CREATEINDEX-CONCURRENTLY)).
Migrations can be forced to run outside a transaction by setting the
`@disable_ddl_transaction` module attribute to `true`:
defmodule MyRepo.Migrations.CreateIndexes do
use Ecto.Migration
@disable_ddl_transaction true
def change do
create index("posts", [:slug], concurrently: true)
end
end
Since running migrations outside a transaction can be dangerous, consider
performing very few operations in such migrations.
See the `index/3` function for more information on creating/dropping indexes
concurrently.
## Comments
Migrations where you create or alter a table support specifying table
and column comments, the same can be done when creating constraints
and indexes. At the moment there is support only for Postgres.
def up do
create index("posts", [:name], comment: "Index Comment")
create constraint("products", "price_must_be_positive", check: "price > 0", comment: "Index Comment")
create table("weather", prefix: "north_america", comment: "Table Comment") do
add :city, :string, size: 40, comment: "Column Comment"
timestamps()
end
end
## Repo configuration
The following migration configurations are available for under
a given repository.
* `:migration_source` - Version numbers of migrations will be saved in
`schema_migrations` table but you can configure the table via:
config :app, App.Repo, migration_source: "my_migrations"
* `:migration_primary_key` - Ecto uses the `:id` column with type
`:bigserial` but you can configure it via:
config :app, App.Repo, migration_primary_key: [id: :uuid, type: :binary_id]
* `:migration_timestamps` - Ecto uses type `:naive_datetime` but you
can configure it via:
config :app, App.Repo, migration_timestamps: [type: :utc_datetime]
"""
defmodule Index do
  @moduledoc """
  Used internally by adapters.

  To define an index in a migration, see `Ecto.Migration.index/3`
  """

  # Fields mirror the options accepted by `Ecto.Migration.index/3`;
  # `name` is filled with a default by `index/3` when not provided.
  defstruct table: nil,
            prefix: nil,
            name: nil,
            columns: [],
            unique: false,
            concurrently: false,
            using: nil,
            where: nil,
            comment: nil,
            options: nil

  @type t :: %__MODULE__{
          table: String.t,
          prefix: atom,
          name: atom,
          columns: [atom | String.t],
          unique: boolean,
          concurrently: boolean,
          using: atom | String.t,
          where: atom | String.t,
          comment: String.t | nil,
          options: String.t
        }
end
defmodule Table do
  @moduledoc """
  Used internally by adapters.

  To define a table in a migration, see `Ecto.Migration.table/2`
  """

  # `primary_key: true` makes `create` add the default `:id` column;
  # `engine` customizes table storage (e.g. on MySQL) and `options` is
  # appended verbatim to the generated DDL statement.
  defstruct name: nil, prefix: nil, comment: nil, primary_key: true, engine: nil, options: nil

  @type t :: %__MODULE__{name: String.t, prefix: atom | nil, comment: String.t | nil, primary_key: boolean,
                         engine: atom, options: String.t}
end
defmodule Reference do
  @moduledoc """
  Used internally by adapters.

  To define a reference in a migration, see `Ecto.Migration.references/2`
  """

  # By default a reference points at the `:id` column with type `:bigserial`
  # and takes no action on delete/update.
  defstruct name: nil, table: nil, column: :id, type: :bigserial, on_delete: :nothing, on_update: :nothing

  @type t :: %__MODULE__{table: String.t, column: atom, type: atom, on_delete: atom, on_update: atom}
end
defmodule Constraint do
  @moduledoc """
  Used internally by adapters.

  To define a constraint in a migration, see `Ecto.Migration.constraint/3`
  """

  # `check` holds a CHECK expression and `exclude` an exclusion constraint;
  # typically only one of the two is set.
  defstruct name: nil, table: nil, check: nil, exclude: nil, prefix: nil, comment: nil

  @type t :: %__MODULE__{name: atom, table: String.t, prefix: atom | nil,
                         check: String.t | nil, exclude: String.t | nil, comment: String.t | nil}
end
defmodule Command do
  @moduledoc """
  Used internally by adapters.

  This represents the up and down legs of a reversible raw command
  that is usually defined with `Ecto.Migration.execute/1`.

  To define a reversible command in a migration, see `Ecto.Migration.execute/2`
  """

  # `up` runs when migrating forward, `down` when rolling back.
  defstruct up: nil, down: nil

  @type t :: %__MODULE__{up: String.t, down: String.t}
end
alias Ecto.Migration.Runner
@doc false
defmacro __using__(_) do
  # Imports the migration DSL into the using module and initializes the
  # attribute read by `__before_compile__/1` to decide whether the
  # migration runs inside a DDL transaction.
  quote location: :keep do
    import Ecto.Migration
    @disable_ddl_transaction false
    @before_compile Ecto.Migration
  end
end
@doc false
defmacro __before_compile__(_env) do
  # Exposes migration metadata (currently only the DDL-transaction flag)
  # to the migration runner via a generated `__migration__/0`.
  quote do
    def __migration__,
      do: [disable_ddl_transaction: @disable_ddl_transaction]
  end
end
@doc """
Creates a table.

By default, the table will also include a primary_key of name `:id`
and type `:bigserial`. Check `table/2` docs for more information.

## Examples

    create table(:posts) do
      add :title, :string, default: "Untitled"
      add :body, :text

      timestamps()
    end

"""
defmacro create(object, do: block) do
  # Expansion (including the default primary key) is shared with
  # `create_if_not_exists/2`; see `expand_create/3`.
  expand_create(object, :create, block)
end
@doc """
Creates a table if it does not exist.

Works just like `create/2` but does not raise an error when table
already exists.
"""
defmacro create_if_not_exists(object, do: block) do
  expand_create(object, :create_if_not_exists, block)
end
# Builds the quoted code shared by `create/2` and `create_if_not_exists/2`:
# starts the command, injects the default primary key when requested, runs
# the user-provided block and finally flushes the command, returning the
# table struct.
defp expand_create(object, command, block) do
  quote do
    table = %Table{} = unquote(object)
    Runner.start_command({unquote(command), Ecto.Migration.__prefix__(table)})

    if table.primary_key do
      # The default primary key name/type may be customized via the
      # `:migration_primary_key` repo configuration.
      opts = Runner.repo_config(:migration_primary_key, [])
      add(opts[:name] || :id, opts[:type] || :bigserial, primary_key: true)
    end

    unquote(block)
    Runner.end_command
    table
  end
end
@doc """
Alters a table.

## Examples

    alter table("posts") do
      add :summary, :text
      modify :title, :text
      remove :views
    end

"""
defmacro alter(object, do: block) do
  quote do
    table = %Table{} = unquote(object)
    # Queue the :alter command, run the user block (add/modify/remove
    # calls), then flush the accumulated subcommands.
    Runner.start_command({:alter, Ecto.Migration.__prefix__(table)})
    unquote(block)
    Runner.end_command
  end
end
@doc """
Creates one of the following:

  * an index
  * a table with only an `:id` field
  * a constraint

When reversing (in `change` running backward) indexes are only dropped if they
exist and no errors are raised. To enforce dropping an index use `drop/1`.

## Examples

    create index("posts", [:name])
    create table("version")
    create constraint("products", "price_must_be_positive", check: "price > 0")

"""
def create(%Table{} = table) do
  do_create(table, :create)
  table
end

def create(%Index{} = index) do
  Runner.execute({:create, __prefix__(index)})
  index
end

def create(%Constraint{} = constraint) do
  Runner.execute({:create, __prefix__(constraint)})
  constraint
end
@doc """
Creates an index or a table with only `:id` field if one does not yet exist.

## Examples

    create_if_not_exists index("posts", [:name])
    create_if_not_exists table("version")

"""
def create_if_not_exists(%Table{} = table) do
  do_create(table, :create_if_not_exists)
end

def create_if_not_exists(%Index{} = index) do
  Runner.execute({:create_if_not_exists, __prefix__(index)})
end
# Issues `command` for `table`, including the default `:id` primary key
# column when the table requests one.
defp do_create(table, command) do
  pk_columns =
    if table.primary_key,
      do: [{:add, :id, :bigserial, primary_key: true}],
      else: []

  Runner.execute({command, __prefix__(table), pk_columns})
end
@doc """
Drops one of the following:

  * an index
  * a table
  * a constraint

## Examples

    drop index("posts", [:name])
    drop table("posts")
    drop constraint("products", "price_must_be_positive")

"""
def drop(%{} = droppable) do
  Runner.execute({:drop, __prefix__(droppable)})
  droppable
end
@doc """
Drops a table or index if it exists.

Does not raise an error if table or index does not exist.

## Examples

    drop_if_exists index("posts", [:name])
    drop_if_exists table("posts")

"""
def drop_if_exists(%{} = droppable) do
  Runner.execute({:drop_if_exists, __prefix__(droppable)})
  droppable
end
@doc """
Returns a table struct that can be given on create, alter, etc.

## Examples

    create table("products") do
      add :name, :string
      add :price, :decimal
    end

    drop table("products")

    create table("products", primary_key: false) do
      add :name, :string
      add :price, :decimal
    end

## Options

  * `:primary_key` - when false, does not generate primary key on table creation
  * `:engine` - customizes the table storage for supported databases. For MySQL,
    the default is InnoDB
  * `:prefix` - the prefix for the table
  * `:options` - provide custom options that will be appended after generated
    statement, for example "WITH", "INHERITS" or "ON COMMIT" clauses

"""
def table(name, opts \\ [])

def table(name, opts) when is_atom(name) do
  name |> Atom.to_string() |> table(opts)
end

def table(name, opts) when is_binary(name) and is_list(opts) do
  struct(%Table{name: name}, opts)
end
@doc ~S"""
Returns an index struct that can be used on `create`, `drop`, etc.

Expects the table name as first argument and the index fields as
second. The field can be an atom, representing a column, or a
string representing an expression that is sent as is to the database.

Indexes are non-unique by default.

## Options

  * `:name` - the name of the index. Defaults to "#{table}_#{column}_index"
  * `:unique` - if the column(s) is unique or not
  * `:concurrently` - if the index should be created/dropped concurrently
  * `:using` - configures the index type
  * `:prefix` - prefix for the index
  * `:where` - the conditions for a partial index

## Adding/dropping indexes concurrently

PostgreSQL supports adding/dropping indexes concurrently (see the
[docs](http://www.postgresql.org/docs/9.4/static/sql-createindex.html)).
In order to take advantage of this, the `:concurrently` option needs to be set
to `true` when the index is created/dropped.

**Note**: in order for the `:concurrently` option to work, the migration must
not be run inside a transaction. See the `Ecto.Migration` docs for more
information on running migrations outside of a transaction.

## Index types

PostgreSQL supports several index types like B-tree, Hash or GiST. When
creating an index, the index type defaults to B-tree, but it can be specified
with the `:using` option. The `:using` option can be an atom or a string; its
value is passed to the `USING` clause as is.

More information on index types can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-types.html).

## Partial indexes

Databases like PostgreSQL and MSSQL support partial indexes.

A partial index is an index built over a subset of a table. The subset
is defined by a conditional expression using the `:where` option.
The `:where` option can be an atom or a string; its value is passed
to the `WHERE` clause as is.

More information on partial indexes can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-partial.html).

## Examples

    # Without a name, index defaults to products_category_id_sku_index
    create index("products", [:category_id, :sku], unique: true)

    # Name can be given explicitly though
    drop index("products", [:category_id, :sku], name: :my_special_name)

    # Indexes can be added concurrently
    create index("products", [:category_id, :sku], concurrently: true)

    # The index type can be specified
    create index("products", [:name], using: :hash)

    # Create a partial index
    create index("products", [:user_id], where: "price = 0", name: :free_products_index)

Indexes also support custom expressions. Some databases may require the
index expression to be written between parens:

    # Create an index on custom expressions
    create index("products", ["(lower(name))"], name: :products_lower_name_index)

    # To create a tsvector index with GIN on Postgres
    create index("products", ["(to_tsvector('english', name))"],
                 name: :products_name_vector, using: "GIN")

"""
def index(table, columns, opts \\ [])

def index(table, columns, opts) when is_atom(table) do
  index(Atom.to_string(table), columns, opts)
end

def index(table, column, opts) when is_binary(table) and is_atom(column) do
  # A single column atom is normalized to a one-element column list.
  index(table, [column], opts)
end

def index(table, columns, opts) when is_binary(table) and is_list(columns) and is_list(opts) do
  index = struct(%Index{table: table, columns: columns}, opts)

  case index.name do
    nil -> %{index | name: default_index_name(index)}
    _name -> index
  end
end
@doc """
Shortcut for creating a unique index.

See `index/3` for more information.
"""
def unique_index(table, columns, opts \\ [])

def unique_index(table, columns, opts) when is_list(opts) do
  # Prepending keeps an explicit `unique:` in `opts` authoritative, since
  # struct/2 lets later duplicate keys win.
  index(table, columns, [unique: true] ++ opts)
end
# Builds the default index name, e.g. "products_category_id_sku_index",
# replacing any non-word character so the name stays a valid identifier.
defp default_index_name(index) do
  parts = List.flatten([index.table, index.columns, "index"])
  joined = Enum.join(parts, "_")

  joined
  |> String.replace(~r"[^\w_]", "_")
  |> String.replace("__", "_")
  |> String.to_atom()
end
@doc """
Executes arbitrary SQL or a keyword command.

Reversible commands can be defined by calling `execute/2`.

## Examples

    execute "CREATE EXTENSION postgres_fdw"

    execute create: "posts", capped: true, size: 1024

"""
def execute(command) when is_binary(command) or is_list(command) do
  Runner.execute(command)
end
@doc """
Executes reversible SQL commands.

This is useful for database-specific functionality that does not
warrant special support in Ecto, for example, creating and dropping
a PostgreSQL extension, and avoids having to define up/down blocks.

## Examples

    execute "CREATE EXTENSION postgres_fdw", "DROP EXTENSION postgres_fdw"

"""
def execute(up, down)
    when (is_binary(up) or is_list(up)) and (is_binary(down) or is_list(down)) do
  Runner.execute(%Command{up: up, down: down})
end
@doc """
Gets the migrator direction.
"""
@spec direction :: :up | :down
def direction do
  # Delegates to the runner process that owns this migration.
  Runner.migrator_direction
end
@doc """
Gets the migrator prefix.
"""
def prefix do
  # Delegates to the runner process that owns this migration.
  Runner.prefix
end
@doc """
Adds a column when creating or altering a table.

This function also accepts Ecto primitive types as column types
and they are normalized by the database adapter. For example,
`:string` is converted to `:varchar`, `:binary` to `:bits` or `:blob`,
and so on.

However, the column type is not always the same as the type used in your
schema. For example, a schema that has a `:string` field,
can be supported by columns of types `:char`, `:varchar`, `:text` and others.
For this reason, this function also accepts `:text` and other columns,
which are sent as is to the underlying database.

To sum up, the column type may be either an Ecto primitive type,
which is normalized in cases the database does not understand it,
like `:string` or `:binary`, or a database type which is passed as is.
Custom Ecto types, like `Ecto.UUID`, are not supported because
they are application level concern and may not always map to the
database.

## Examples

    create table("posts") do
      add :title, :string, default: "Untitled"
    end

    alter table("posts") do
      add :summary, :text # Database type
      add :object, :map   # Elixir type which is handled by the database
    end

## Options

  * `:primary_key` - when true, marks this field as the primary key
  * `:default` - the column's default value. can be a string, number
    or a fragment generated by `fragment/1`
  * `:null` - when `false`, the column does not allow null values
  * `:size` - the size of the type (for example the numbers of characters).
    Default is no size, except for `:string` that defaults to 255.
  * `:precision` - the precision for a numeric type. Required when `scale` is specified.
  * `:scale` - the scale of a numeric type. Default is 0 scale
"""
def add(column, type, opts \\ [])
# Deprecated :datetime shim: warn once per call site and normalize to
# :naive_datetime before re-dispatching.
def add(column, :datetime, opts) when is_atom(column) do
  IO.warn "the :datetime type in migrations is deprecated, " <>
          "please use :utc_datetime or :naive_datetime instead"
  add(column, :naive_datetime, opts)
end
def add(column, type, opts) when is_atom(column) and is_list(opts) do
  # :scale without :precision is rejected up front - most databases require
  # precision whenever scale is given.
  if opts[:scale] && !opts[:precision] do
    raise ArgumentError, "Column #{Atom.to_string(column)} is missing precision option"
  end
  # Reject module names (e.g. Ecto.UUID) before queueing the subcommand.
  validate_type!(type)
  Runner.subcommand {:add, column, type, opts}
end
@doc """
Renames a table.

## Examples

    rename table("posts"), to: table("new_posts")

"""
def rename(%Table{} = table_current, to: %Table{} = table_new) do
  # Both sides go through __prefix__/1 so they inherit (and are checked
  # against) the migrator prefix.
  Runner.execute {:rename, __prefix__(table_current), __prefix__(table_new)}
  # Return the new table so the call can be piped/chained.
  table_new
end
@doc """
Renames a column outside of the `alter` statement.

## Examples

    rename table("posts"), :title, to: :summary

"""
def rename(%Table{} = table, current_column, to: new_column) when is_atom(current_column) and is_atom(new_column) do
  Runner.execute {:rename, __prefix__(table), current_column, new_column}
  # Return the table so the call can be piped/chained.
  table
end
@doc """
Generates a fragment to be used as default value.

## Examples

    create table("posts") do
      add :inserted_at, :naive_datetime, default: fragment("now()")
    end

"""
def fragment(expr) when is_binary(expr) do
  # Tagged tuple recognized by adapters; the expression is emitted verbatim
  # into the DDL instead of being quoted as a literal.
  {:fragment, expr}
end
@doc """
Adds `:inserted_at` and `:updated_at` timestamps columns.

Those columns are of `:naive_datetime` type, and by default
cannot be null. `opts` can be given to customize the generated
fields.

## Options

  * `:inserted_at` - the name of the column for insertion times, providing `false` disables column
  * `:updated_at` - the name of the column for update times, providing `false` disables column
  * `:type` - column type, defaults to `:naive_datetime`
"""
def timestamps(opts \\ []) when is_list(opts) do
  # Repo-wide :migration_timestamps config supplies defaults; explicit opts win.
  opts = Keyword.merge(Runner.repo_config(:migration_timestamps, []), opts)
  opts = Keyword.put_new(opts, :null, false)
  # Pop the timestamps-specific options; whatever remains is forwarded to
  # add/3 as plain column options (e.g. null: false).
  {type, opts} = Keyword.pop(opts, :type, :naive_datetime)
  {inserted_at, opts} = Keyword.pop(opts, :inserted_at, :inserted_at)
  {updated_at, opts} = Keyword.pop(opts, :updated_at, :updated_at)
  # `false` disables the respective column entirely.
  if inserted_at != false, do: add(inserted_at, type, opts)
  if updated_at != false, do: add(updated_at, type, opts)
end
@doc """
Modifies the type of column when altering a table.

This command is not reversible as Ecto does not know what
is the current type to revert it back to.

See `add/3` for more information on supported types.

## Examples

    alter table("posts") do
      modify :title, :text
    end

## Options

  * `:null` - sets to null or not null
  * `:default` - changes the default
  * `:size` - the size of the type (for example the numbers of characters). Default is no size.
  * `:precision` - the precision for a numeric type. Required when `scale` is specified.
  * `:scale` - the scale of a numeric type. Default is 0 scale.
"""
def modify(column, type, opts \\ [])
# Deprecated :datetime shim, mirroring add/3: warn and normalize.
def modify(column, :datetime, opts) when is_atom(column) do
  IO.warn "the :datetime type in migrations is deprecated, " <>
          "please use :utc_datetime or :naive_datetime instead"
  modify(column, :naive_datetime, opts)
end
def modify(column, type, opts) when is_atom(column) and is_list(opts) do
  # Same precision/scale invariant as add/3.
  if opts[:scale] && !opts[:precision] do
    raise ArgumentError, "Column #{Atom.to_string(column)} is missing precision option"
  end
  Runner.subcommand {:modify, column, type, opts}
end
@doc """
Removes a column when altering a table.

This command is not reversible as Ecto does not know what
is the current type to add the column back as.

## Examples

    alter table("posts") do
      remove :title
    end

"""
def remove(column) when is_atom(column) do
  Runner.subcommand {:remove, column}
end
@doc ~S"""
Defines a foreign key.

## Examples

    create table("products") do
      add :group_id, references("groups")
    end

## Options

  * `:name` - The name of the underlying reference,
    defaults to "#{table}_#{column}_fkey"
  * `:column` - The foreign key column, default is `:id`
  * `:type` - The foreign key type, default is `:bigserial`
  * `:on_delete` - What to perform if the referenced entry
    is deleted. May be `:nothing`, `:delete_all`,
    `:nilify_all` or `:restrict`. Defaults to `:nothing`.
  * `:on_update` - What to perform if the referenced entry
    is updated. May be `:nothing`, `:update_all`,
    `:nilify_all` or `:restrict`. Defaults to `:nothing`.
"""
def references(table, opts \\ [])
# Atom table names are normalized to strings and re-dispatched.
def references(table, opts) when is_atom(table) do
  references(Atom.to_string(table), opts)
end
def references(table, opts) when is_binary(table) and is_list(opts) do
  # The repo-configured primary key :type (if any) acts as a default that
  # explicit opts override.
  repo_opts = Keyword.take(Runner.repo_config(:migration_primary_key, []), [:type])
  opts = Keyword.merge(repo_opts, opts)
  reference = struct(%Reference{table: table}, opts)
  # Validate the referential actions eagerly so typos fail at migration
  # definition time rather than inside the adapter.
  unless reference.on_delete in [:nothing, :delete_all, :nilify_all, :restrict] do
    raise ArgumentError, "unknown :on_delete value: #{inspect reference.on_delete}"
  end
  unless reference.on_update in [:nothing, :update_all, :nilify_all, :restrict] do
    raise ArgumentError, "unknown :on_update value: #{inspect reference.on_update}"
  end
  reference
end
@doc ~S"""
Defines a constraint (either a check constraint or an exclusion constraint) to be evaluated by the database when a row is inserted or updated.

## Examples

    create constraint("users", :price_must_be_positive, check: "price > 0")
    create constraint("size_ranges", :no_overlap, exclude: ~s|gist (int4range("from", "to", '[]') WITH &&)|)
    drop constraint("products", "price_must_be_positive")

## Options

  * `:check` - A check constraint expression. Required when creating a check constraint.
  * `:exclude` - An exclusion constraint expression. Required when creating an exclusion constraint.
"""
def constraint(table, name, opts \\ [])
# Atom table names are normalized to strings and re-dispatched.
def constraint(table, name, opts) when is_atom(table) do
  constraint(Atom.to_string(table), name, opts)
end
def constraint(table, name, opts) when is_binary(table) and is_list(opts) do
  struct(%Constraint{table: table, name: name}, opts)
end
@doc """
Executes queue migration commands.

Reverses the order commands are executed when doing a rollback
on a change/0 function and resets commands queue.
"""
def flush do
  Runner.flush
end
# Ensures a migration column type is a plain atom (or a composite/Reference),
# rejecting Elixir module names such as `Ecto.UUID`.
defp validate_type!(type) when is_atom(type) do
  if type |> Atom.to_string() |> String.starts_with?("Elixir.") do
    raise ArgumentError,
          "#{inspect type} is not a valid database type, " <>
            "please use an atom like :string, :text and so on"
  else
    :ok
  end
end

# Composite types such as {:array, :string}: validate the inner type.
defp validate_type!({type, subtype}) when is_atom(type) and is_atom(subtype) do
  validate_type!(subtype)
end

# Nested composites such as {:array, {:array, :string}}: validate each element.
defp validate_type!({type, subtype}) when is_atom(type) and is_tuple(subtype) do
  subtype |> Tuple.to_list() |> Enum.map(&validate_type!/1)
end

# References carry their own type information and pass through untouched.
defp validate_type!(%Reference{} = reference) do
  reference
end
@doc false
# Resolves the effective schema prefix for a table or index against the
# migrator's prefix:
#   * no explicit prefix        -> inherit the migrator prefix
#   * matching (or no migrator) -> keep as-is
#   * conflicting prefixes      -> raise
def __prefix__(%{prefix: prefix} = index_or_table) do
  runner_prefix = Runner.prefix()
  cond do
    is_nil(prefix) ->
      %{index_or_table | prefix: runner_prefix}
    is_nil(runner_prefix) or runner_prefix == to_string(prefix) ->
      index_or_table
    true ->
      # Bug fix: the message previously read "does match", inverting its
      # meaning; it must say "does not match".
      raise Ecto.MigrationError, message:
        "the :prefix option `#{prefix}` does not match the migrator prefix `#{runner_prefix}`"
  end
end
end
|
deps/ecto/lib/ecto/migration.ex
| 0.869715
| 0.565599
|
migration.ex
|
starcoder
|
defmodule RfidLatachz.Attendances do
  @moduledoc """
  The Attendances context.
  """

  import Ecto.Query, warn: false
  alias RfidLatachz.Repo

  alias RfidLatachz.Attendances.Attendance
  alias RfidLatachz.Users
  alias Phoenix.PubSub
  import Ecto

  @doc """
  Returns the list of attendances, newest first, with `:user` preloaded.

  ## Examples

      iex> list_attendances()
      [%Attendance{}, ...]

  """
  def list_attendances do
    Repo.all(Attendance |> order_by(desc: :inserted_at)) |> Repo.preload(:user)
  end

  @doc """
  Gets a single attendance.

  Raises `Ecto.NoResultsError` if the Attendance does not exist.

  ## Examples

      iex> get_attendance!(123)
      %Attendance{}

      iex> get_attendance!(456)
      ** (Ecto.NoResultsError)

  """
  def get_attendance!(id), do: Repo.get!(Attendance, id)

  @doc """
  Creates a attendance for the user owning the given RFID uid.

  Returns `{:ok, "Attendance created"}` on success,
  `{:error, "User with this uid not found"}` when the uid is unknown, or
  `{:error, %Ecto.Changeset{}}` when the insert fails.

  ## Examples

      iex> create_attendance(%{field: value}, rfid_uid)
      {:ok, "Attendance created"}

      iex> create_attendance(%{field: bad_value}, rfid_uid)
      {:error, %Ecto.Changeset{}}

  """
  def create_attendance(attrs \\ %{}, rfid_uid) do
    case Users.get_user_by_rfid_uid(rfid_uid) do
      {:ok, user} ->
        # Bug fix: the previous version ignored the result of Repo.insert/1
        # and always answered {:ok, "Attendance created"}, masking changeset
        # errors. Propagate insert failures to the caller instead.
        user
        |> build_assoc(:attendances)
        |> Attendance.changeset(attrs)
        |> Repo.insert()
        |> broadcast(:attendance_created)
        |> case do
          {:ok, _attendance} -> {:ok, "Attendance created"}
          {:error, _changeset} = error -> error
        end

      {:error, _} ->
        {:error, "User with this uid not found"}
    end
  end

  @doc """
  Updates a attendance.

  ## Examples

      iex> update_attendance(attendance, %{field: new_value})
      {:ok, %Attendance{}}

      iex> update_attendance(attendance, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_attendance(%Attendance{} = attendance, attrs) do
    attendance
    |> Attendance.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a attendance.

  ## Examples

      iex> delete_attendance(attendance)
      {:ok, %Attendance{}}

      iex> delete_attendance(attendance)
      {:error, %Ecto.Changeset{}}

  """
  def delete_attendance(%Attendance{} = attendance) do
    Repo.delete(attendance)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking attendance changes.

  ## Examples

      iex> change_attendance(attendance)
      %Ecto.Changeset{data: %Attendance{}}

  """
  def change_attendance(%Attendance{} = attendance, attrs \\ %{}) do
    Attendance.changeset(attendance, attrs)
  end

  @doc """
  Subscribes the caller to the "attendances" PubSub topic.
  """
  def subscribe do
    PubSub.subscribe(RfidLatachz.PubSub, "attendances")
  end

  # Errors pass through untouched so broadcast/2 can sit in a pipeline.
  defp broadcast({:error, _reason} = error, _event), do: error

  # Bug fix: return the original {:ok, attendance} tuple instead of the bare
  # :ok that PubSub.broadcast/3 yields, so pipelines keep the result.
  defp broadcast({:ok, attendance} = result, event) do
    PubSub.broadcast(RfidLatachz.PubSub, "attendances", {event, attendance})
    result
  end
end
|
web/lib/rfid_latachz/attendances.ex
| 0.781497
| 0.442396
|
attendances.ex
|
starcoder
|
defmodule LayoutOMatic.Dropdown do
  # Checkbox size based on :button_font_size with 20 being the default; width/height override
  @default_font_size 20
  @default_font :roboto
  @default_drop_direction :down

  @spec translate(%{
          component: map,
          starting_xy: {number, number},
          grid_xy: {number, number},
          max_xy: {number, number}
        }) ::
          {:error, <<_::160, _::_*32>>}
          | {:ok, {number, number},
             %{
               grid_xy: {number, number},
               max_xy: number,
               primitive: %{data: number, styles: map},
               starting_xy: {number, number}
             }}
  # Computes the translation (x, y) for a dropdown component inside a layout
  # grid, advancing the layout's :starting_xy for the next component and
  # wrapping to a new row when the component would overflow the grid's max x.
  # Returns {:error, _} when the dropdown (including its opened drop box)
  # cannot fit inside max_xy.
  def translate(
        %{
          component: component,
          starting_xy: starting_xy,
          grid_xy: grid_xy,
          max_xy: max_xy
        } = layout
      ) do
    # NOTE(review): assumes the component's :data is shaped {_, {items, _}},
    # i.e. a Scenic dropdown's {items, selected} payload — confirm upstream.
    {_, {items, _}} = Map.get(component, :data)
    {starting_x, starting_y} = starting_xy
    {grid_x, grid_y} = grid_xy
    styles = Map.get(component, :styles, %{})
    font_size = Map.get(styles, :button_font_size, @default_font_size)
    metrics = Scenic.Cache.Static.FontMetrics.get!(@default_font)
    ascent = FontMetrics.ascent(font_size, metrics)

    # find the width of the widest item
    fm_width =
      Enum.reduce(items, 0, fn {text, _}, w ->
        width = FontMetrics.width(text, font_size, metrics)
        max(w, width)
      end)

    # Explicit numeric :width/:height styles override the font-derived size.
    width =
      case Map.get(styles, :width) do
        nil -> fm_width + ascent * 3
        :auto -> fm_width + ascent * 3
        width when is_number(width) and width > 0 -> width
      end

    height =
      case Map.get(styles, :height) do
        nil -> font_size + ascent
        :auto -> font_size + ascent
        height when is_number(height) and height > 0 -> height
      end

    # calculate the drop box measures
    item_count = Enum.count(items)
    drop_height = item_count * height
    drop_direction = Map.get(styles, :direction, @default_drop_direction)

    case starting_xy == grid_xy do
      # First component of the group: place at the grid origin (shifted down by
      # the drop box height when the dropdown opens upward).
      true ->
        xy =
          case drop_direction do
            :down ->
              {starting_x, starting_y}

            :up ->
              {starting_x, starting_y + drop_height}
          end

        # Advance the running x for the next component in this row.
        layout = Map.put(layout, :starting_xy, {starting_x + width, starting_y})
        {:ok, xy, layout}

      false ->
        # already in a new group, use starting_xy
        case fits_in_x?(starting_x + width, max_xy) do
          # fits in x
          true ->
            # fit in y?
            case fits_in_y?(starting_y + height + drop_height, max_xy) do
              true ->
                # fits
                xy =
                  case drop_direction do
                    :down ->
                      {starting_x, starting_y}

                    :up ->
                      {starting_x, starting_y + drop_height}
                  end

                layout = Map.put(layout, :starting_xy, {starting_x + width, starting_y})
                {:ok, xy, layout}

              # Does not fit
              false ->
                {:error, "Does not fit in grid"}
            end

          # doesnt fit in x
          false ->
            # fit in new y?
            # NOTE(review): the :up branch reserves drop_height twice —
            # presumably headroom for a drop box opening upward from the new
            # row; confirm against the rendering behaviour.
            new_y =
              case drop_direction do
                :down ->
                  grid_y + height + drop_height

                :up ->
                  grid_y + height + drop_height * 2
              end

            case fits_in_y?(new_y, max_xy) do
              # fits in new y, check x
              true ->
                # NOTE(review): starting_xy's x restarts at `width` rather than
                # `grid_x + width` — only equivalent when grid_x is 0; verify.
                new_layout =
                  layout
                  |> Map.put(:grid_xy, {grid_x, new_y})
                  |> Map.put(:starting_xy, {width, new_y})

                {:ok, {grid_x, new_y}, new_layout}

              false ->
                {:error, "Does not fit in the grid"}
            end
        end
    end
  end

  # True when the component's right edge stays within the grid's max x.
  defp fits_in_x?(potential_x, {max_x, _}),
    do: potential_x <= max_x

  # True when the component's bottom edge stays within the grid's max y.
  defp fits_in_y?(potential_y, {_, max_y}),
    do: potential_y <= max_y
end
|
lib/layouts/components/dropdown.ex
| 0.742795
| 0.423577
|
dropdown.ex
|
starcoder
|
defmodule Stripe.Token do
  @moduledoc """
  Work with Stripe token objects.

  You can:
  - Create a token for a Connect customer with a card
  - Create a token with all options - Only for Unit Tests with Stripe
  - Retrieve a token

  Stripe API reference: https://stripe.com/docs/api/tokens
  """

  use Stripe.Entity
  import Stripe.Request

  @type token_bank_account :: %{
          id: Stripe.id(),
          object: String.t(),
          account_holder_name: String.t() | nil,
          account_holder_type: String.t() | nil,
          bank_name: String.t() | nil,
          country: String.t(),
          currency: String.t(),
          fingerprint: String.t() | nil,
          last4: String.t(),
          routing_number: String.t() | nil,
          status: String.t()
        }

  @type token_card :: %{
          id: Stripe.id(),
          object: String.t(),
          address_city: String.t() | nil,
          address_country: String.t() | nil,
          address_line1: String.t() | nil,
          address_line1_check: String.t() | nil,
          address_line2: String.t() | nil,
          address_state: String.t() | nil,
          address_zip: String.t() | nil,
          address_zip_check: String.t() | nil,
          brand: String.t(),
          country: String.t() | nil,
          currency: String.t(),
          cvc_check: String.t() | nil,
          dynamic_last4: String.t() | nil,
          exp_month: integer,
          exp_year: integer,
          fingerprint: String.t() | nil,
          funding: String.t(),
          last4: String.t(),
          metadata: Stripe.Types.metadata(),
          name: String.t() | nil,
          tokenization_method: String.t() | nil
        }

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          bank_account: token_bank_account | nil,
          card: token_card | nil,
          client_ip: String.t() | nil,
          created: Stripe.timestamp(),
          livemode: boolean,
          type: String.t(),
          used: boolean
        }

  defstruct [
    :id,
    :object,
    :bank_account,
    :card,
    :client_ip,
    :created,
    :livemode,
    :type,
    :used
  ]

  @plural_endpoint "tokens"

  @doc """
  Creates a single use token that wraps the details of a credit card. This
  token can be used in place of a credit card dictionary with any API method.
  These tokens can only be used once: by creating a new charge object, or
  attaching them to a customer.

  In most cases, you should create tokens client-side using Checkout, Elements,
  or Stripe's mobile libraries, instead of using the API.
  """
  @spec create(map, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def create(params, opts \\ []) do
    # POST /v1/tokens with the given card/bank-account params.
    opts
    |> new_request()
    |> put_endpoint(@plural_endpoint)
    |> put_params(params)
    |> put_method(:post)
    |> make_request()
  end

  @doc """
  Retrieve a token.
  """
  @spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def retrieve(id, opts \\ []) do
    # GET /v1/tokens/:id - accepts either a raw id or a %Stripe.Token{}.
    opts
    |> new_request()
    |> put_endpoint("#{@plural_endpoint}/#{get_id!(id)}")
    |> put_method(:get)
    |> make_request()
  end
end
|
lib/stripe/core_resources/token.ex
| 0.84729
| 0.411081
|
token.ex
|
starcoder
|
defmodule RemoteIp.Options do
  # Compile-time defaults; interpolated into @moduledoc below, so they must be
  # declared before it.
  @headers ~w[forwarded x-forwarded-for x-client-ip x-real-ip]
  @parsers %{"forwarded" => RemoteIp.Parsers.Forwarded}
  @proxies []
  @clients []

  @moduledoc """
  The keyword options given to `RemoteIp.init/1` or `RemoteIp.from/2`.

  You shouldn't need to use this module directly. Its functions are used
  internally by `RemoteIp` to process configurations and support MFA-style
  [runtime options](#module-runtime-options).

  You may pass any of the following keyword arguments into the plug (they get
  passed to `RemoteIp.init/1`). You can also pass the same keywords directly to
  `RemoteIp.from/2`.

  ## `:headers`

  The `:headers` option should be a list of strings. These are the names of
  headers that contain forwarding information. The default is

  ```elixir
  #{inspect(@headers, pretty: true)}
  ```

  Every request header whose name exactly matches one of these strings will be
  parsed for IP addresses, which are then used to determine the routing
  information and ultimately the original client IP. Note that `Plug`
  normalizes headers to lowercase, so this option should consist of lowercase
  names.

  In production, you likely want this to be a singleton - a list of only one
  string. There are a couple reasons:

  1. You usually can't rely on servers to preserve the relative ordering of
     headers in the HTTP request. For example, the
     [Cowboy](https://github.com/ninenines/cowboy/) server presently [uses
     maps](https://github.com/elixir-plug/plug_cowboy/blob/f82f2ff982f04fb4faa3a12fd2b08a7cc56ebe15/lib/plug/cowboy/conn.ex#L125-L127)
     to represent headers, which don't preserve key order. The order in which
     we process IPs matters because we take that as the routing information for
     the request. So if you have multiple competing headers, the routing might
     be ambiguous, and you could get bad results.

  2. It could also be a security issue. Say you're only expecting one header
     like `X-Forwarded-For`, but configure multiple headers like
     `["x-forwarded-for", "x-real-ip"]`. Then it'd be easy for a malicious user
     to just set an extra `X-Real-Ip` header and interfere with the IP parsing
     (again, due to the sensitive nature of header ordering).

  We still allow multiple headers because:

  1. Users can get up & running faster if the default configuration recognizes
     all of the common headers.

  2. You shouldn't be relying that heavily on IP addresses for security. Even a
     single plain-text header has enough problems on its own that we can't
     guarantee its results are accurate. For more details, see the
     documentation for [the algorithm](algorithm.md).

  3. It's more general. Networking setups are often very idiosyncratic, and we
     want to give users the option to use multiple headers if that's what they
     need.

  ## `:parsers`

  The `:parsers` option should be a map from strings to modules. Each string
  should be a header name (lowercase), and each module should implement the
  `RemoteIp.Parser` behaviour. The default is

  ```elixir
  #{inspect(@parsers, pretty: true)}
  ```

  Headers with the given name are parsed using the given module. If a header is
  not found in this map, it will be parsed by `RemoteIp.Parsers.Generic`. So
  you can use this option to:

  * add a parser for your own custom header
  * specialize on the generic parsing of headers like `"x-forwarded-for"`
  * replace any of the default parsers with one of your own

  The map you provide for this option is automatically merged into the default
  using `Map.merge/2`. That way, the stock parsers won't be overridden unless
  you explicitly provide your own replacement.

  ## `:proxies`

  The `:proxies` option should be a list of strings - either individual IPs or
  ranges in
  [CIDR](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
  notation. The default is

  ```elixir
  #{inspect(@proxies, pretty: true)}
  ```

  For the sake of efficiency, you should prefer CIDR notation where possible.
  So instead of listing out 256 different addresses for the `1.2.3.x` block,
  you should say `"1.2.3.0/24"`.

  These proxies are skipped by [the algorithm](algorithm.md) and are never
  considered the original client IP, unless specifically overruled by the
  `:clients` option.

  In addition to the proxies listed here, note that the following [reserved IP
  addresses](https://en.wikipedia.org/wiki/Reserved_IP_addresses) are also
  skipped automatically, as they are presumed to be internal addresses that
  don't belong to the client:

  * IPv4 loopback: `127.0.0.0/8`
  * IPv6 loopback: `::1/128`
  * IPv4 private network: `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`
  * IPv6 unique local address: `fc00::/7`

  ## `:clients`

  The `:clients` option should be a list of strings - either individual IPs or
  ranges in
  [CIDR](https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing)
  notation. The default is

  ```elixir
  #{inspect(@clients, pretty: true)}
  ```

  For the sake of efficiency, you should prefer CIDR notation where possible.
  So instead of listing out 256 different addresses for the `1.2.3.x` block,
  you should say `"1.2.3.0/24"`.

  These addresses are never considered to be proxies by [the
  algorithm](algorithm.md). For example, if you configure the `:proxies` option
  to include `"1.2.3.0/24"` and the `:clients` option to include `"1.2.3.4"`,
  then every IP in the `1.2.3.x` block would be considered a proxy *except* for
  `1.2.3.4`.

  This option can also be used on reserved IP addresses that would otherwise be
  skipped automatically. For example, if your routing works through a local
  network, you might actually consider addresses in the `10.x.x.x` block to be
  clients. You could permit the entire block with `"10.0.0.0/8"`, or even
  specific IPs in this range like `"10.1.2.3"`.

  ## Runtime options

  Every option can also accept a tuple of three elements: `{module, function,
  arguments}` (MFA). These are passed to `Kernel.apply/3` at runtime, allowing
  you to dynamically configure the plug, even though the `Plug.Builder`
  generally calls `c:Plug.init/1` at compilation time.

  The return value from an MFA should be the same as if you were passing the
  literal into that option. For instance, the `:proxies` MFA should return a
  list of IP/CIDR strings.

  The MFAs you give are re-evaluated on *each call* to `RemoteIp.call/2` or
  `RemoteIp.from/2`. So be careful not to do anything too expensive at runtime.
  For example, don't download a list of known proxies, or else it will be
  re-downloaded on every request. Consider caching the download instead,
  perhaps using a library like [`Cachex`](https://hexdocs.pm/cachex).

  ## Examples

  ### Basic usage

  Suppose you know:

  * you are behind proxies in the `1.2.x.x` block
  * the proxies use the `X-Real-Ip` header
  * but the IP `1.2.3.4` is actually a client, not one of the proxies

  Then you could say:

  ```elixir
  defmodule MyApp do
    use Plug.Router

    plug RemoteIp,
      headers: ~w[x-real-ip],
      proxies: ~w[1.2.0.0/16],
      clients: ~w[1.2.3.4]

    plug :match
    plug :dispatch

    # get "/" do ...
  end
  ```

  The same options may also be passed into `RemoteIp.from/2`:

  ```elixir
  defmodule MySocket do
    use Phoenix.Socket

    @options [
      headers: ~w[x-real-ip],
      proxies: ~w[1.2.0.0/16],
      clients: ~w[1.2.3.4]
    ]

    def connect(params, socket, connect_info) do
      ip = RemoteIp.from(connect_info[:x_headers], @options)
      # ...
    end
  end
  ```

  ### Custom parser

  Suppose your proxies are using a header with a special format. The name of
  the header is `X-Special` and the format looks like `ip=127.0.0.1`.

  First, you'd implement a custom parser:

  ```elixir
  defmodule SpecialParser do
    @behaviour RemoteIp.Parser

    @impl RemoteIp.Parser
    def parse(header) do
      ip = String.replace_prefix(header, "ip=", "")
      case :inet.parse_strict_address(ip |> to_charlist()) do
        {:ok, parsed} -> [parsed]
        _ -> []
      end
    end
  end
  ```

  Then you would configure the plug with that parser. Make sure to also specify
  the `:headers` option so that the `X-Special` header actually gets passed to
  the parser.

  ```elixir
  defmodule SpecialApp do
    use Plug.Router

    plug RemoteIp,
      headers: ~w[x-special],
      parsers: %{"x-special" => SpecialParser}

    plug :match
    plug :dispatch

    # get "/" do ...
  end
  ```

  ### Using MFAs

  Suppose you're deploying a release and you want to get the proxy IPs from an
  environment variable. Because the release is compiled ahead of time, you
  shouldn't do a `System.get_env/1` inline - it'll just be the value of the
  environment variable circa compilation time (probably empty!).

  ```elixir
  defmodule CompiledApp do
    use Plug.Router

    # DON'T DO THIS: the value of the env var gets compiled into the release
    plug RemoteIp, proxies: System.get_env("PROXIES") |> String.split(",")

    plug :match
    plug :dispatch

    # get "/" do ...
  end
  ```

  Instead, you can use an MFA to look up the variable at runtime:

  ```elixir
  defmodule RuntimeApp do
    use Plug.Router

    plug RemoteIp, proxies: {__MODULE__, :proxies, []}

    def proxies do
      System.get_env("PROXIES") |> String.split(",", trim: true)
    end

    plug :match
    plug :dispatch

    # get "/" do ...
  end
  ```
  """

  @doc """
  The default value for the given option.
  """
  def default(option)
  def default(:headers), do: @headers
  def default(:parsers), do: @parsers
  def default(:proxies), do: @proxies
  def default(:clients), do: @clients

  @doc """
  Processes keyword options, delaying the evaluation of MFAs until `unpack/1`.
  """
  def pack(options) do
    [
      headers: pack(options, :headers),
      parsers: pack(options, :parsers),
      proxies: pack(options, :proxies),
      clients: pack(options, :clients)
    ]
  end

  # MFA tuples are kept as-is for later application; literal values are
  # normalized eagerly (e.g. CIDR strings parsed into blocks) so the work
  # happens once at init time rather than on every request.
  defp pack(options, option) do
    case Keyword.get(options, option, default(option)) do
      {m, f, a} -> {m, f, a}
      value -> evaluate(option, value)
    end
  end

  @doc """
  Evaluates options processed by `pack/1`, applying MFAs as needed.
  """
  def unpack(options) do
    [
      headers: unpack(options, :headers),
      parsers: unpack(options, :parsers),
      proxies: unpack(options, :proxies),
      clients: unpack(options, :clients)
    ]
  end

  # Applies any deferred MFA at call time and normalizes its result; values
  # already evaluated by pack/2 pass through untouched.
  defp unpack(options, option) do
    case Keyword.get(options, option) do
      {m, f, a} -> evaluate(option, apply(m, f, a))
      value -> value
    end
  end

  # Header names need no normalization.
  defp evaluate(:headers, headers) do
    headers
  end

  # User parsers are merged over the defaults, so stock parsers survive unless
  # explicitly replaced.
  defp evaluate(:parsers, parsers) do
    Map.merge(default(:parsers), parsers)
  end

  # IP/CIDR strings are parsed into RemoteIp.Block structs up front.
  defp evaluate(:proxies, proxies) do
    proxies |> Enum.map(&RemoteIp.Block.parse!/1)
  end

  defp evaluate(:clients, clients) do
    clients |> Enum.map(&RemoteIp.Block.parse!/1)
  end
end
|
lib/remote_ip/options.ex
| 0.889006
| 0.835953
|
options.ex
|
starcoder
|
defmodule Exvalidate do
  @moduledoc """
  Enter point for validate data structure.

  If you want to validate a data set you first have to take into account
  the following important points:

  1 - This dependence is based on a schema-data system in which two data
  structures are received, one called schema and another called data which
  have to be related in some way.

  2 - The creation of a good schema is essential for the validation of data,
  remember that the library validates the data in a sequential way, therefore
  it is the schema that decides which is validated first and which rule
  is validated first. If it fails, it will always give the corresponding error.

  The schema should be a keyword list in the following way, examples:

  ```
  @schema [
    id: [:required]
  ]
  ```

  another example:

  ```
  @schema [
    id: [:required, length: 16],
    name: [type: :string, max_length: 24]
  ]
  ```

  The last example first validate the id data with the schema [:required, length: 16],
  if the required fault, return this error an not continue with the validation.

  the rules that are available right now are:

  - :accepted.
  - between: {min, max}, where min and max are integers.
  - default: default_value.
  - :email.
  - in: [].
  - length: equal_length, where equal_length is a integer.
  - max_length: max, where max is a integer.
  - min_length: min, where min is a integer.
  - :required
  - type: type, where type can be: :atom, :string, :list, :map, :tuple, :number, :integer, :float

  3 - the data should always be a map. At the moment nothing else is accepted,
  if you want to validate a specific data I recommend you to call directly
  to the rules, each one of them has its documentation with some example of
  operation and its corresponding tests.

  Example of data:

  ```
  %{
    "id" => 12_345,
    "name" => "picolo"
  }
  ```

  4- The entry point is a macro called validate, which we will use as follows:

  ```
  use Exvalidate
  ```

  and the last:

  ```
  validate(data, schema)
  ```
  """

  alias Exvalidate.Messages

  defmacro __using__(_) do
    quote do
      alias Exvalidate.Validate

      @validate_fn &Validate.rules/2

      @doc false
      def validate(data, schema, validate_fn \\ @validate_fn) do
        Exvalidate.run_validate(data, schema, validate_fn)
      end
    end
  end

  @spec run_validate(map(), list(), function()) ::
          {:ok, map()} | {:error, String.t()}
  @doc false
  def run_validate(data, schema, validate_fn) do
    with :ok <- validate_allowed_params(data, schema),
         {:ok, new_data} <- validate_schema(data, schema, validate_fn) do
      {:ok, new_data}
    else
      {:error, msg} -> {:error, msg}
    end
  end

  # Rejects any data key that does not appear in the schema.
  #
  # Bug fix: the original computed `schema_keys -- data_keys`, which (a) errored
  # on schema fields merely *absent* from the data - contradicting rules such
  # as :required and default: that handle missing fields - and (b) never
  # flagged extra data keys, despite the "is not allowed" message. The
  # comparison now runs in the correct direction. It also compares string keys
  # directly instead of calling String.to_atom/1 on external input, which
  # created atoms from unbounded user data (a memory-exhaustion vector).
  defp validate_allowed_params(data, schema) do
    allowed = Enum.map(Keyword.keys(schema), &Atom.to_string/1)

    case Map.keys(data) -- allowed do
      [] ->
        :ok

      [field | _rest] ->
        {:error, "#{field} is not allowed."}
    end
  end

  # Validates each schema entry in order, halting on the first failing rule.
  defp validate_schema(data, schema, validate_fn) do
    Enum.reduce_while(schema, {:ok, data}, &validating(&1, &2, validate_fn))
  end

  # Runs the rule list for one schema key against the (string-keyed) data,
  # storing the possibly-transformed value back into the data on success.
  defp validating({key, rules}, {:ok, data}, validate_fn) do
    parse_key = Atom.to_string(key)
    rule_data = Map.get(data, parse_key)

    case validate_fn.(rules, rule_data) do
      {:ok, data_validate} ->
        {:cont, {:ok, Map.put(data, parse_key, data_validate)}}

      {:error, error} ->
        {:halt, {:error, Messages.get(error, rule_data, key)}}
    end
  end
end
|
lib/exvalidate.ex
| 0.862366
| 0.993436
|
exvalidate.ex
|
starcoder
|
defmodule Exnoops.Drumbot do
  @moduledoc """
  Module to interact with Github's Noop: Drumbot

  See the [official `noop` documentation](https://noopschallenge.com/challenges/drumbot)
  """

  require Logger
  import Exnoops.API

  @noop "drumbot"

  @doc """
  Query Drumbot for patterns

  ## Examples

      iex> Exnoops.Drumbot.get_patterns()
      {:ok,
      [
        "oontza",
        "bossanoopa",
        "nipnop",
        "botthisway",
        "funkee",
        "shlojam",
        "botorik",
        "swoop",
        "schmaltz",
        "bouncy"
      ]
      }
  """
  @spec get_patterns :: {atom(), list()}
  def get_patterns do
    Logger.debug("Calling Drumbot.get_patterns()")

    case get("/#{@noop}/patterns", []) do
      {:ok, patterns} when is_list(patterns) ->
        # Keep only entries shaped like %{"name" => _}, mirroring the original
        # comprehension's pattern filtering.
        names =
          Enum.flat_map(patterns, fn
            %{"name" => name} -> [name]
            _other -> []
          end)

        {:ok, names}

      error ->
        error
    end
  end

  @doc """
  Query Drumbot for a `pattern`'s data

  ## Examples

      iex> Exnoops.Drumbot.get_pattern("nipnop")
      {:ok,
      %{
        "name" => "nipnop",
        "beatsPerMinute" => 92,
        "stepCount" => 16,
        "tracks" => [
          %{
            "instrument" => "snare",
            "steps" => [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
          },
          %{
            "instrument" => "clap",
            "steps" => [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1]
          },
          %{
            "instrument" => "cowbell",
            "steps" => [0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0]
          },
          %{
            "instrument" => "kick",
            "steps" => [1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0]
          }
        ]
      }}
  """
  @spec get_pattern(String.t()) :: {atom(), map()}
  def get_pattern(pattern) when is_binary(pattern) do
    Logger.debug("Calling Drumbot.get_pattern()")

    # A successful response must echo the requested pattern name back; any
    # other value (including errors) falls through the `with` unchanged.
    with {:ok, %{"name" => ^pattern} = body} <- get("/#{@noop}/patterns/" <> pattern, []) do
      {:ok, body}
    end
  end
end
|
lib/exnoops/drumbot.ex
| 0.573798
| 0.507507
|
drumbot.ex
|
starcoder
|
defmodule AWS.RDSData do
  @moduledoc """
  Amazon RDS Data Service
  Amazon RDS provides an HTTP endpoint to run SQL statements on an Amazon Aurora
  Serverless DB cluster.
  To run these statements, you work with the Data Service API.
  For more information about the Data Service API, see [Using the Data API for Aurora
  Serverless](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/data-api.html)
  in the *Amazon Aurora User Guide*.
  """
  alias AWS.Client
  alias AWS.Request

  # Static routing/signing metadata shared by every operation of this service.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2018-08-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "rds-data",
      global?: false,
      protocol: "rest-json",
      service_id: "RDS Data",
      signature_version: "v4",
      signing_name: "rds-data",
      target_prefix: nil
    }
  end

  @doc """
  Runs a batch SQL statement over an array of data.
  You can run bulk update and insert operations for multiple records using a DML
  statement with different parameter sets. Bulk operations can provide a
  significant performance improvement over individual insert and update
  operations.
  If a call isn't part of a transaction because it doesn't include the
  `transactionID` parameter, changes that result from the call are committed
  automatically.
  """
  def batch_execute_statement(%Client{} = client, input, options \\ []) do
    request(client, "/BatchExecute", input, options)
  end

  @doc """
  Starts a SQL transaction.
  A transaction can run for a maximum of 24 hours. A transaction is terminated
  and rolled back automatically after 24 hours.
  A transaction times out if no calls use its transaction ID in three minutes. If
  a transaction times out before it's committed, it's rolled back automatically.
  DDL statements inside a transaction cause an implicit commit. We recommend that
  you run each DDL statement in a separate `ExecuteStatement` call with
  `continueAfterTimeout` enabled.
  """
  def begin_transaction(%Client{} = client, input, options \\ []) do
    request(client, "/BeginTransaction", input, options)
  end

  @doc """
  Ends a SQL transaction started with the `BeginTransaction` operation and commits
  the changes.
  """
  def commit_transaction(%Client{} = client, input, options \\ []) do
    request(client, "/CommitTransaction", input, options)
  end

  @doc """
  Runs one or more SQL statements.
  This operation is deprecated. Use the `BatchExecuteStatement` or
  `ExecuteStatement` operation.
  """
  def execute_sql(%Client{} = client, input, options \\ []) do
    request(client, "/ExecuteSql", input, options)
  end

  @doc """
  Runs a SQL statement against a database.
  If a call isn't part of a transaction because it doesn't include the
  `transactionID` parameter, changes that result from the call are committed
  automatically.
  The response size limit is 1 MB. If the call returns more than 1 MB of response
  data, the call is terminated.
  """
  def execute_statement(%Client{} = client, input, options \\ []) do
    request(client, "/Execute", input, options)
  end

  @doc """
  Performs a rollback of a transaction.
  Rolling back a transaction cancels its changes.
  """
  def rollback_transaction(%Client{} = client, input, options \\ []) do
    request(client, "/RollbackTransaction", input, options)
  end

  # Every RDS Data Service operation is a REST-JSON POST with no extra
  # headers or query parameters that expects an HTTP 200 on success; this
  # helper keeps each public function down to its operation-specific path.
  defp request(%Client{} = client, url_path, input, options) do
    Request.request_rest(
      client,
      metadata(),
      :post,
      url_path,
      [],
      [],
      input,
      options,
      200
    )
  end
end
|
lib/aws/generated/rds_data.ex
| 0.828211
| 0.457197
|
rds_data.ex
|
starcoder
|
defmodule AWS.SNS do
  @moduledoc """
  Amazon Simple Notification Service
  Amazon Simple Notification Service (Amazon SNS) is a web service that enables
  you to build distributed web-enabled applications.
  Applications can use Amazon SNS to easily push real-time notification messages
  to interested subscribers over multiple delivery protocols. For more information
  about this product see [https://aws.amazon.com/sns](http://aws.amazon.com/sns/). For detailed information about Amazon SNS features and their associated API
  calls, see the [Amazon SNS Developer
  Guide](https://docs.aws.amazon.com/sns/latest/dg/).
  We also provide SDKs that enable you to access Amazon SNS from your preferred
  programming language. The SDKs contain functionality that automatically takes
  care of tasks such as: cryptographically signing your service requests, retrying
  requests, and handling error responses. For a list of available SDKs, go to
  [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
  """
  @doc """
  Adds a statement to a topic's access control policy, granting access for the
  specified AWS accounts to the specified actions.
  """
  def add_permission(client, input, options \\ []) do
    request(client, "AddPermission", input, options)
  end
  @doc """
  Accepts a phone number and indicates whether the phone holder has opted out of
  receiving SMS messages from your account.
  You cannot send SMS messages to a number that is opted out.
  To resume sending messages, you can opt in the number by using the
  `OptInPhoneNumber` action.
  """
  def check_if_phone_number_is_opted_out(client, input, options \\ []) do
    request(client, "CheckIfPhoneNumberIsOptedOut", input, options)
  end
  @doc """
  Verifies an endpoint owner's intent to receive messages by validating the token
  sent to the endpoint by an earlier `Subscribe` action.
  If the token is valid, the action creates a new subscription and returns its
  Amazon Resource Name (ARN). This call requires an AWS signature only when the
  `AuthenticateOnUnsubscribe` flag is set to "true".
  """
  def confirm_subscription(client, input, options \\ []) do
    request(client, "ConfirmSubscription", input, options)
  end
  @doc """
  Creates a platform application object for one of the supported push notification
  services, such as APNS and GCM (Firebase Cloud Messaging), to which devices and
  mobile apps may register.
  You must specify `PlatformPrincipal` and `PlatformCredential` attributes when
  using the `CreatePlatformApplication` action.
  `PlatformPrincipal` and `PlatformCredential` are received from the notification
  service.
  * For `ADM`, `PlatformPrincipal` is `client id` and
  `PlatformCredential` is `client secret`.
  * For `Baidu`, `PlatformPrincipal` is `API key` and
  `PlatformCredential` is `secret key`.
  * For `APNS` and `APNS_SANDBOX`, `PlatformPrincipal` is `SSL
  certificate` and `PlatformCredential` is `private key`.
  * For `GCM` (Firebase Cloud Messaging), there is no
  `PlatformPrincipal` and the `PlatformCredential` is `API key`.
  * For `MPNS`, `PlatformPrincipal` is `TLS certificate` and
  `PlatformCredential` is `private key`.
  * For `WNS`, `PlatformPrincipal` is `Package Security Identifier`
  and `PlatformCredential` is `secret key`.
  You can use the returned `PlatformApplicationArn` as an attribute for the
  `CreatePlatformEndpoint` action.
  """
  def create_platform_application(client, input, options \\ []) do
    request(client, "CreatePlatformApplication", input, options)
  end
  @doc """
  Creates an endpoint for a device and mobile app on one of the supported push
  notification services, such as GCM (Firebase Cloud Messaging) and APNS.
  `CreatePlatformEndpoint` requires the `PlatformApplicationArn` that is returned
  from `CreatePlatformApplication`. You can use the returned `EndpointArn` to send
  a message to a mobile app or by the `Subscribe` action for subscription to a
  topic. The `CreatePlatformEndpoint` action is idempotent, so if the requester
  already owns an endpoint with the same device token and attributes, that
  endpoint's ARN is returned without creating a new endpoint. For more
  information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  When using `CreatePlatformEndpoint` with Baidu, two attributes must be provided:
  ChannelId and UserId. The token field must also contain the ChannelId. For more
  information, see [Creating an Amazon SNS Endpoint for Baidu](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePushBaiduEndpoint.html).
  """
  def create_platform_endpoint(client, input, options \\ []) do
    request(client, "CreatePlatformEndpoint", input, options)
  end
  @doc """
  Creates a topic to which notifications can be published.
  Users can create at most 100,000 topics. For more information, see
  [https://aws.amazon.com/sns](http://aws.amazon.com/sns/). This action is
  idempotent, so if the requester already owns a topic with the specified name,
  that topic's ARN is returned without creating a new topic.
  """
  def create_topic(client, input, options \\ []) do
    request(client, "CreateTopic", input, options)
  end
  @doc """
  Deletes the endpoint for a device and mobile app from Amazon SNS.
  This action is idempotent. For more information, see [Using Amazon SNS Mobile Push
  Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  When you delete an endpoint that is also subscribed to a topic, then you must
  also unsubscribe the endpoint from the topic.
  """
  def delete_endpoint(client, input, options \\ []) do
    request(client, "DeleteEndpoint", input, options)
  end
  @doc """
  Deletes a platform application object for one of the supported push notification
  services, such as APNS and GCM (Firebase Cloud Messaging).
  For more information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  """
  def delete_platform_application(client, input, options \\ []) do
    request(client, "DeletePlatformApplication", input, options)
  end
  @doc """
  Deletes a topic and all its subscriptions.
  Deleting a topic might prevent some messages previously sent to the topic from
  being delivered to subscribers. This action is idempotent, so deleting a topic
  that does not exist does not result in an error.
  """
  def delete_topic(client, input, options \\ []) do
    request(client, "DeleteTopic", input, options)
  end
  @doc """
  Retrieves the endpoint attributes for a device on one of the supported push
  notification services, such as GCM (Firebase Cloud Messaging) and APNS.
  For more information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  """
  def get_endpoint_attributes(client, input, options \\ []) do
    request(client, "GetEndpointAttributes", input, options)
  end
  @doc """
  Retrieves the attributes of the platform application object for the supported
  push notification services, such as APNS and GCM (Firebase Cloud Messaging).
  For more information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  """
  def get_platform_application_attributes(client, input, options \\ []) do
    request(client, "GetPlatformApplicationAttributes", input, options)
  end
  @doc """
  Returns the settings for sending SMS messages from your account.
  These settings are set with the `SetSMSAttributes` action.
  """
  def get_s_m_s_attributes(client, input, options \\ []) do
    request(client, "GetSMSAttributes", input, options)
  end
  @doc """
  Returns all of the properties of a subscription.
  """
  def get_subscription_attributes(client, input, options \\ []) do
    request(client, "GetSubscriptionAttributes", input, options)
  end
  @doc """
  Returns all of the properties of a topic.
  Topic properties returned might differ based on the authorization of the user.
  """
  def get_topic_attributes(client, input, options \\ []) do
    request(client, "GetTopicAttributes", input, options)
  end
  @doc """
  Lists the endpoints and endpoint attributes for devices in a supported push
  notification service, such as GCM (Firebase Cloud Messaging) and APNS.
  The results for `ListEndpointsByPlatformApplication` are paginated and return a
  limited list of endpoints, up to 100. If additional records are available after
  the first page results, then a NextToken string will be returned. To receive the
  next page, you call `ListEndpointsByPlatformApplication` again using the
  NextToken string received from the previous call. When there are no more records
  to return, NextToken will be null. For more information, see [Using Amazon SNS Mobile Push
  Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  This action is throttled at 30 transactions per second (TPS).
  """
  def list_endpoints_by_platform_application(client, input, options \\ []) do
    request(client, "ListEndpointsByPlatformApplication", input, options)
  end
  @doc """
  Returns a list of phone numbers that are opted out, meaning you cannot send SMS
  messages to them.
  The results for `ListPhoneNumbersOptedOut` are paginated, and each page returns
  up to 100 phone numbers. If additional phone numbers are available after the
  first page of results, then a `NextToken` string will be returned. To receive
  the next page, you call `ListPhoneNumbersOptedOut` again using the `NextToken`
  string received from the previous call. When there are no more records to
  return, `NextToken` will be null.
  """
  def list_phone_numbers_opted_out(client, input, options \\ []) do
    request(client, "ListPhoneNumbersOptedOut", input, options)
  end
  @doc """
  Lists the platform application objects for the supported push notification
  services, such as APNS and GCM (Firebase Cloud Messaging).
  The results for `ListPlatformApplications` are paginated and return a limited
  list of applications, up to 100. If additional records are available after the
  first page results, then a NextToken string will be returned. To receive the
  next page, you call `ListPlatformApplications` using the NextToken string
  received from the previous call. When there are no more records to return,
  `NextToken` will be null. For more information, see [Using Amazon SNS Mobile Push
  Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  This action is throttled at 15 transactions per second (TPS).
  """
  def list_platform_applications(client, input, options \\ []) do
    request(client, "ListPlatformApplications", input, options)
  end
  @doc """
  Returns a list of the requester's subscriptions.
  Each call returns a limited list of subscriptions, up to 100. If there are more
  subscriptions, a `NextToken` is also returned. Use the `NextToken` parameter in
  a new `ListSubscriptions` call to get further results.
  This action is throttled at 30 transactions per second (TPS).
  """
  def list_subscriptions(client, input, options \\ []) do
    request(client, "ListSubscriptions", input, options)
  end
  @doc """
  Returns a list of the subscriptions to a specific topic.
  Each call returns a limited list of subscriptions, up to 100. If there are more
  subscriptions, a `NextToken` is also returned. Use the `NextToken` parameter in
  a new `ListSubscriptionsByTopic` call to get further results.
  This action is throttled at 30 transactions per second (TPS).
  """
  def list_subscriptions_by_topic(client, input, options \\ []) do
    request(client, "ListSubscriptionsByTopic", input, options)
  end
  @doc """
  List all tags added to the specified Amazon SNS topic.
  For an overview, see [Amazon SNS Tags](https://docs.aws.amazon.com/sns/latest/dg/sns-tags.html) in the *Amazon
  Simple Notification Service Developer Guide*.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end
  @doc """
  Returns a list of the requester's topics.
  Each call returns a limited list of topics, up to 100. If there are more topics,
  a `NextToken` is also returned. Use the `NextToken` parameter in a new
  `ListTopics` call to get further results.
  This action is throttled at 30 transactions per second (TPS).
  """
  def list_topics(client, input, options \\ []) do
    request(client, "ListTopics", input, options)
  end
  @doc """
  Use this request to opt in a phone number that is opted out, which enables you
  to resume sending SMS messages to the number.
  You can opt in a phone number only once every 30 days.
  """
  def opt_in_phone_number(client, input, options \\ []) do
    request(client, "OptInPhoneNumber", input, options)
  end
  @doc """
  Sends a message to an Amazon SNS topic, a text message (SMS message) directly to
  a phone number, or a message to a mobile platform endpoint (when you specify the
  `TargetArn`).
  If you send a message to a topic, Amazon SNS delivers the message to each
  endpoint that is subscribed to the topic. The format of the message depends on
  the notification protocol for each subscribed endpoint.
  When a `messageId` is returned, the message has been saved and Amazon SNS will
  attempt to deliver it shortly.
  To use the `Publish` action for sending a message to a mobile endpoint, such as
  an app on a Kindle device or mobile phone, you must specify the EndpointArn for
  the TargetArn parameter. The EndpointArn is returned when making a call with the
  `CreatePlatformEndpoint` action.
  For more information about formatting messages, see [Send Custom Platform-Specific Payloads in Messages to Mobile
  Devices](https://docs.aws.amazon.com/sns/latest/dg/mobile-push-send-custommessage.html).
  You can publish messages only to topics and endpoints in the same AWS Region.
  """
  def publish(client, input, options \\ []) do
    request(client, "Publish", input, options)
  end
  @doc """
  Removes a statement from a topic's access control policy.
  """
  def remove_permission(client, input, options \\ []) do
    request(client, "RemovePermission", input, options)
  end
  @doc """
  Sets the attributes for an endpoint for a device on one of the supported push
  notification services, such as GCM (Firebase Cloud Messaging) and APNS.
  For more information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  """
  def set_endpoint_attributes(client, input, options \\ []) do
    request(client, "SetEndpointAttributes", input, options)
  end
  @doc """
  Sets the attributes of the platform application object for the supported push
  notification services, such as APNS and GCM (Firebase Cloud Messaging).
  For more information, see [Using Amazon SNS Mobile Push Notifications](https://docs.aws.amazon.com/sns/latest/dg/SNSMobilePush.html).
  For information on configuring attributes for message delivery status, see
  [Using Amazon SNS Application Attributes for Message Delivery Status](https://docs.aws.amazon.com/sns/latest/dg/sns-msg-status.html).
  """
  def set_platform_application_attributes(client, input, options \\ []) do
    request(client, "SetPlatformApplicationAttributes", input, options)
  end
  @doc """
  Use this request to set the default settings for sending SMS messages and
  receiving daily SMS usage reports.
  You can override some of these settings for a single message when you use the
  `Publish` action with the `MessageAttributes.entry.N` parameter. For more
  information, see [Sending an SMS Message](https://docs.aws.amazon.com/sns/latest/dg/sms_publish-to-phone.html) in
  the *Amazon SNS Developer Guide*.
  """
  def set_s_m_s_attributes(client, input, options \\ []) do
    request(client, "SetSMSAttributes", input, options)
  end
  @doc """
  Allows a subscription owner to set an attribute of the subscription to a new
  value.
  """
  def set_subscription_attributes(client, input, options \\ []) do
    request(client, "SetSubscriptionAttributes", input, options)
  end
  @doc """
  Allows a topic owner to set an attribute of the topic to a new value.
  """
  def set_topic_attributes(client, input, options \\ []) do
    request(client, "SetTopicAttributes", input, options)
  end
  @doc """
  Subscribes an endpoint to an Amazon SNS topic.
  If the endpoint type is HTTP/S or email, or if the endpoint and the topic are
  not in the same AWS account, the endpoint owner must run the `ConfirmSubscription`
  action to confirm the subscription.
  You call the `ConfirmSubscription` action with the token from the subscription
  response. Confirmation tokens are valid for three days.
  This action is throttled at 100 transactions per second (TPS).
  """
  def subscribe(client, input, options \\ []) do
    request(client, "Subscribe", input, options)
  end
  @doc """
  Add tags to the specified Amazon SNS topic.
  For an overview, see [Amazon SNS Tags](https://docs.aws.amazon.com/sns/latest/dg/sns-tags.html) in the *Amazon
  SNS Developer Guide*.
  When you use topic tags, keep the following guidelines in mind:
  * Adding more than 50 tags to a topic isn't recommended.
  * Tags don't have any semantic meaning. Amazon SNS interprets tags
  as character strings.
  * Tags are case-sensitive.
  * A new tag with a key identical to that of an existing tag
  overwrites the existing tag.
  * Tagging actions are limited to 10 TPS per AWS account, per AWS
  region. If your application requires a higher throughput, file a [technical support
  request](https://console.aws.amazon.com/support/home#/case/create?issueType=technical).
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end
  @doc """
  Deletes a subscription.
  If the subscription requires authentication for deletion, only the owner of the
  subscription or the topic's owner can unsubscribe, and an AWS signature is
  required. If the `Unsubscribe` call does not require authentication and the
  requester is not the subscription owner, a final cancellation message is
  delivered to the endpoint, so that the endpoint owner can easily resubscribe to
  the topic if the `Unsubscribe` request was unintended.
  This action is throttled at 100 transactions per second (TPS).
  """
  def unsubscribe(client, input, options \\ []) do
    request(client, "Unsubscribe", input, options)
  end
  @doc """
  Remove tags from the specified Amazon SNS topic.
  For an overview, see [Amazon SNS Tags](https://docs.aws.amazon.com/sns/latest/dg/sns-tags.html) in the *Amazon
  SNS Developer Guide*.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  # SNS uses the AWS "query" protocol: the action name and API version are
  # merged into the input, the whole payload is form-urlencoded, the request
  # is signed with Signature V4, and the XML response is decoded by `post/5`.
  defp request(client, action, input, options) do
    client = %{client | service: "sns"}
    host = build_host("sns", client)
    url = build_url(host, client)
    headers = [
      {"Host", host},
      {"Content-Type", "application/x-www-form-urlencoded"}
    ]
    input = Map.merge(input, %{"Action" => action, "Version" => "2010-03-31"})
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end
  # Performs the signed POST. On a 200 the body is decoded from XML; any
  # other success status is surfaced as {:error, {:unexpected_response, _}}.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        # An empty body is passed through as nil (the `if` has no else branch).
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end
  # Local-development override: a client with region "local" talks to the
  # configured endpoint (or plain "localhost") instead of a real AWS host.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
  # Request bodies are encoded with the AWS "query" (form-urlencoded) codec.
  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :query)
  end
  # Response bodies are XML.
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :xml)
  end
end
|
lib/aws/generated/sns.ex
| 0.863679
| 0.572902
|
sns.ex
|
starcoder
|
defmodule CodeCorps.Tasks.Query do
  @moduledoc ~S"""
  Holds queries used to retrieve a list of, or a single `Task` record from the
  database, using a provided map of parameters/filters.
  """
  import Ecto.Query
  import ScoutApm.Tracing
  alias CodeCorps.{Helpers, Project, Task, Repo}
  alias Ecto.Queryable
  @doc ~S"""
  Returns a list of `Task` records, filtered by a map of parameters.
  Accepted parameters are a `project_id`, or a list of comma separated
  `task_list_ids`, combined with a `status`.
  The records are returned ordered by the `:order` field, ascending.
  """
  @spec list(map) :: list(Project.t)
  def list(%{} = params) do
    # Wrapped in ScoutApm instrumentation so the query is timed as
    # "Tasks.Query"/"list" in APM traces.
    timing("Tasks.Query", "list") do
      Task
      |> Helpers.Query.id_filter(params)
      |> apply_archived_status(params)
      |> apply_status(params)
      |> apply_optional_filters(params)
      |> order_by([asc: :order])
      |> Repo.all()
    end
  end
  # Recursively applies the optional "project_id" / "task_list_ids" filters,
  # removing each key from the params map as it is consumed; the bare-map
  # clause terminates the recursion.
  @spec apply_optional_filters(Queryable.t, map) :: Queryable.t
  defp apply_optional_filters(query, %{"filter" => %{} = params}) do
    # JSON-API style payloads nest filters under a "filter" key; unwrap it.
    query |> apply_optional_filters(params)
  end
  defp apply_optional_filters(query, %{"project_id" => project_id} = params) do
    query
    |> where(project_id: ^project_id)
    |> apply_optional_filters(params |> Map.delete("project_id"))
  end
  defp apply_optional_filters(query, %{"task_list_ids" => task_list_ids} = params) do
    # "1,2,3" -> [1, 2, 3] via the shared id-string helper.
    task_list_ids = task_list_ids |> Helpers.String.coalesce_id_string
    query
    |> where([r], r.task_list_id in ^task_list_ids)
    |> apply_optional_filters(params |> Map.delete("task_list_ids"))
  end
  defp apply_optional_filters(query, %{}), do: query
  # Filters by the "archived" param when present; defaults to excluding
  # archived tasks when the param is absent.
  @spec apply_archived_status(Queryable.t, map) :: Queryable.t
  defp apply_archived_status(query, %{"archived" => archived}) do
    query
    |> where(archived: ^archived)
  end
  defp apply_archived_status(query, %{}) do
    query
    |> where(archived: false)
  end
  # Filters by "status" when present; otherwise leaves the query unchanged.
  @spec apply_status(Queryable.t, map) :: Queryable.t
  defp apply_status(query, %{"status" => status}) do
    query
    |> where(status: ^status)
  end
  defp apply_status(query, %{}), do: query
  @doc ~S"""
  Returns a `Task` record retrieved using a set of parameters.
  This set can be
  - a combination of `project_id` and `number`
  - a combination of `task_list_id` and `number`
  - an `id`
  """
  @spec find(map) :: Queryable.t
  def find(%{"project_id" => project_id, "number" => number}) do
    Task |> Repo.get_by(project_id: project_id, number: number)
  end
  def find(%{"task_list_id" => task_list_id, "number" => number}) do
    Task |> Repo.get_by(task_list_id: task_list_id, number: number)
  end
  def find(%{"id" => id}) do
    Task |> Repo.get(id)
  end
end
|
lib/code_corps/tasks/query.ex
| 0.811041
| 0.417331
|
query.ex
|
starcoder
|
defmodule ETLSystem.Workflow do
  @moduledoc """
  Contains the necessary information to define a workflow
  """

  @typedoc """
  Defines the structure of a workflow.
  """
  @type t :: %__MODULE__{
          id: binary() | atom(),
          next: list(),
          args: any(),
          previous: any(),
          run_id: binary() | atom()
        }

  defstruct [:id, :next, :args, :previous, :run_id]

  @doc """
  Create a new workflow with:
  - The workflow steps and params
  - The name of the workflow defined in the config
  - A unique identifier for the run itself for logging purposes
  """
  @spec new(t(), binary() | atom(), binary() | atom()) :: t()
  def new(steps, workflow_id, run_identifier) do
    %__MODULE__{id: workflow_id, next: steps, run_id: run_identifier}
  end

  @doc """
  Update :previous with a given value.
  This is used by the runner to store the result of a task for the next task to receive
  """
  @spec previous(t(), any()) :: t()
  def previous(workflow, result) do
    %__MODULE__{workflow | previous: result}
  end

  @doc """
  Update a running workflow's next steps.
  This is used by a task that wants to change all future tasks in the workflow.
  """
  @spec next_steps(t(), list(module())) :: t()
  def next_steps(workflow, upcoming) do
    %__MODULE__{workflow | next: upcoming}
  end

  @doc """
  Inject one step into the next slot in the workflow.
  This is used by a task that wants to only add one step into the future.
  """
  @spec next_up(t(), module()) :: t()
  def next_up(workflow, step) do
    # Prepend so the injected step runs before everything already queued.
    %__MODULE__{workflow | next: [step | workflow.next]}
  end

  @doc """
  Inject one step into the next slot in the workflow with some given argument.
  This is used by a task that wants to only add one step into the future with an argument.
  The functionality is identical to using `next_up/2` and passing in a tuple.
  E.g.,
  `next_up(workflow, __MODULE__, 10)` is the same as `next_up(workflow, {__MODULE__, 10})`
  """
  @spec next_up(t(), module(), any()) :: t()
  def next_up(workflow, step, argument) do
    %__MODULE__{workflow | next: [{step, argument} | workflow.next]}
  end
end
|
lib/etl_system/workflow.ex
| 0.86521
| 0.562958
|
workflow.ex
|
starcoder
|
defmodule AbsintheErrorPayload.ChangesetParser do
@moduledoc """
Converts an ecto changeset into a list of validation errors structs.
Currently *does not* support nested errors
"""
import Ecto.Changeset, only: [traverse_errors: 2]
alias AbsintheErrorPayload.ValidationMessage
@doc "Generate a list of `AbsintheErrorPayload.ValidationMessage` structs from changeset errors
For examples, please see the test cases in the github repo.
"
def extract_messages(changeset) do
changeset
|> reject_replaced_changes()
|> traverse_errors(&construct_traversed_message/3)
|> Enum.to_list()
|> Enum.flat_map(&handle_nested_errors/1)
end
defp reject_replaced_changes(values) when is_list(values) do
values
|> Enum.map(&reject_replaced_changes/1)
|> Enum.reject(&match?(%Ecto.Changeset{action: :replace}, &1))
end
defp reject_replaced_changes(%{changes: changes} = changeset) do
Enum.reduce(changes, changeset, fn {key, value}, acc ->
%{acc | changes: Map.put(acc.changes, key, reject_replaced_changes(value))}
end)
end
defp reject_replaced_changes(value), do: value
defp handle_nested_errors({parent_field, values}) when is_map(values) do
Enum.flat_map(values, fn {field, value} ->
field_with_parent = construct_field(parent_field, field)
handle_nested_errors({field_with_parent, value})
end)
end
defp handle_nested_errors({parent_field, values}) when is_list(values) do
values
|> Enum.with_index()
|> Enum.flat_map(&handle_nested_error(parent_field, &1))
end
defp handle_nested_errors({_field, values}), do: values
defp handle_nested_error(parent_field, {%ValidationMessage{} = value, _index}) do
[%{value | field: parent_field}]
end
defp handle_nested_error(parent_field, {many_values, index}) do
Enum.flat_map(many_values, fn {field, values} ->
field_with_index = construct_field(parent_field, field, index: index)
handle_nested_errors({field_with_index, values})
end)
end
defp construct_traversed_message(_changeset, field, {message, opts}) do
construct_message(field, {message, opts})
end
defp construct_field(parent_field, field, options \\ []) do
:absinthe_error_payload
|> Application.get_env(:field_constructor)
|> apply(:error, [parent_field, field, options])
end
@doc "Generate a single `AbsintheErrorPayload.ValidationMessage` struct from a changeset.
This method is designed to be used with `Ecto.Changeset.traverse_errors` to generate a map of structs.
## Examples
```
error_map = Changeset.traverse_errors(fn(changeset, field, error) ->
AbsintheErrorPayload.ChangesetParser.construct_message(field, error)
end)
error_list = Enum.flat_map(error_map, fn({_, messages}) -> messages end)
```
"
def construct_message(field, error_tuple)
def construct_message(field, {message, opts}) do
%ValidationMessage{
code: to_code({message, opts}),
field: construct_field(field, nil),
key: field,
template: message,
message: interpolate_message({message, opts}),
options: tidy_opts(opts)
}
end
defp tidy_opts(opts) do
Keyword.drop(opts, [:validation, :max, :is, :min, :code])
end
@doc """
Inserts message variables into message.
## Examples
iex> interpolate_message({"length should be between %{one} and %{two}", [one: "1", two: "2", three: "3"]})
"length should be between 1 and 2"
"""
# Code Taken from the Pheonix DataCase.on_errors/1 boilerplate"
def interpolate_message({message, opts}) do
Enum.reduce(opts, message, fn {key, value}, acc ->
key_pattern = "%{#{key}}"
value =
case value do
{type, subtype} ->
type <> ":" <> subtype
_else ->
to_string(value)
end
if String.contains?(acc, key_pattern) do
String.replace(acc, key_pattern, to_string(value))
else
acc
end
end)
end
@doc """
Generate unique code for each validation type.
Expects an array of validation options such as those supplied
by `Ecto.Changeset.traverse_errors/2`, with the addition of a message key containing the message string.
Messages are required for several validation types to be identified.
## Supported
- `:cast` - generated by `Ecto.Changeset.cast/3`
- `:association` - generated by `Ecto.Changeset.assoc_constraint/3`, `Ecto.Changeset.cast_assoc/3`, `Ecto.Changeset.put_assoc/3`, `Ecto.Changeset.cast_embed/3`, `Ecto.Changeset.put_embed/3`
- `:acceptance` - generated by `Ecto.Changeset.validate_acceptance/3`
- `:confirmation` - generated by `Ecto.Changeset.validate_confirmation/3`
- `:length` - generated by `Ecto.Changeset.validate_length/3` when the `:is` option fails validation
- `:min` - generated by `Ecto.Changeset.validate_length/3` when the `:min` option fails validation
- `:max` - generated by `Ecto.Changeset.validate_length/3` when the `:max` option fails validation
- `:less_than_or_equal_to` - generated by `Ecto.Changeset.validate_length/3` when the `:less_than_or_equal_to` option fails validation
- `:less_than` - generated by `Ecto.Changeset.validate_length/3` when the `:less_than` option fails validation
- `:greater_than_or_equal_to` - generated by `Ecto.Changeset.validate_length/3` when the `:greater_than_or_equal_to` option fails validation
- `:greater_than` - generated by `Ecto.Changeset.validate_length/3` when the `:greater_than` option fails validation
- `:equal_to` - generated by `Ecto.Changeset.validate_length/3` when the `:equal_to` option fails validation
- `:exclusion` - generated by `Ecto.Changeset.validate_exclusion/4`
- `:inclusion` - generated by `Ecto.Changeset.validate_inclusion/4`
- `:required` - generated by `Ecto.Changeset.validate_required/3`
- `:subset` - generated by `Ecto.Changeset.validate_subset/4`
- `:unique` - generated by `Ecto.Changeset.unique_constraint/3`
- `:foreign` - generated by `Ecto.Changeset.foreign_key_constraint/3`
- `:no_assoc_constraint` - generated by `Ecto.Changeset.no_assoc_constraint/3`
- `:unknown` - supplied when validation cannot be matched. This will also match any custom errors added through
`Ecto.Changeset.add_error/4`, `Ecto.Changeset.validate_change/3`, and `Ecto.Changeset.validate_change/4`
"""
def to_code({message, validation_options}) do
  # A `:message` key in the options wins over the supplied message, exactly
  # as `Enum.into/2` with a pre-seeded map would behave.
  validation_options
  |> Map.new()
  |> Map.put_new(:message, message)
  |> validation_options_to_code()
end
# Maps a normalised options map to a machine-readable error-code atom.
# Clause order matters: an explicit `:code` always wins, then Ecto's
# `:validation`/`:kind` tags, then message-string sniffing for constraint
# errors that carry no validation tag, and finally `:unknown`.
defp validation_options_to_code(%{code: code}), do: code
defp validation_options_to_code(%{validation: :cast}), do: :cast
defp validation_options_to_code(%{validation: :required}), do: :required
defp validation_options_to_code(%{validation: :format}), do: :format
defp validation_options_to_code(%{validation: :inclusion}), do: :inclusion
defp validation_options_to_code(%{validation: :exclusion}), do: :exclusion
defp validation_options_to_code(%{validation: :subset}), do: :subset
defp validation_options_to_code(%{validation: :acceptance}), do: :acceptance
defp validation_options_to_code(%{validation: :confirmation}), do: :confirmation
defp validation_options_to_code(%{validation: :length, kind: :is}), do: :length
defp validation_options_to_code(%{validation: :length, kind: :min}), do: :min
defp validation_options_to_code(%{validation: :length, kind: :max}), do: :max
# Number validations only expose the comparison in the message text, so the
# most specific phrases must be tested first ("less than or equal to" before
# "less than", both before the bare "equal to").
defp validation_options_to_code(%{validation: :number, message: message}) do
  cond do
    String.contains?(message, "less than or equal to") -> :less_than_or_equal_to
    String.contains?(message, "greater than or equal to") -> :greater_than_or_equal_to
    String.contains?(message, "less than") -> :less_than
    String.contains?(message, "greater than") -> :greater_than
    String.contains?(message, "equal to") -> :equal_to
    true -> :unknown
  end
end
# Constraint errors carry no `:validation` tag and are identified by their
# default messages, so custom `:message` options on the constraint macros
# will fall through to `:unknown`.
defp validation_options_to_code(%{message: "is invalid", type: _}), do: :association
defp validation_options_to_code(%{message: "has already been taken"}), do: :unique
defp validation_options_to_code(%{message: "does not exist"}), do: :foreign
defp validation_options_to_code(%{message: "is still associated with this entry"}), do: :no_assoc
defp validation_options_to_code(_unknown) do
  :unknown
end
end
|
lib/absinthe_error_payload/changeset_parser.ex
| 0.89371
| 0.560854
|
changeset_parser.ex
|
starcoder
|
defmodule LinkNode do
  @moduledoc """
  A single node of a singly linked list: an integer value plus a pointer
  to the next node, or `nil` at the tail.
  """

  defstruct value: nil,
            next: nil

  # Fix: `next` is `nil` at the tail, so the type must include `nil`
  # (the original declared a non-nilable recursive type).
  @type t :: %__MODULE__{
          value: integer(),
          next: t() | nil
        }

  @doc "Creates a tail node holding `value`."
  @spec new(integer()) :: t()
  def new(value) when is_integer(value) do
    struct!(__MODULE__, value: value, next: nil)
  end

  @doc "Creates a node holding `value` that points at the existing node `next`."
  @spec new(integer(), t()) :: t()
  def new(value, %__MODULE__{} = next) do
    struct!(__MODULE__, value: value, next: next)
  end
end
defmodule LinkedList do
  @moduledoc """
  A singly linked list built from `LinkNode` structs that tracks its size.
  """

  defstruct head: nil,
            size: 0

  # Fix: `head` is `nil` for the empty list, so the type must include `nil`.
  @type t :: %__MODULE__{
          head: LinkNode.t() | nil,
          size: non_neg_integer()
        }

  @doc """
  Creates a new linked list
  #Example
  iex> LinkedList.new()\
  |> LinkedList.to_list()
  []
  """
  def new() do
    struct!(__MODULE__)
  end

  @doc """
  Creates a linked list from a list
  #Example
  iex>LinkedList.from_list([1,2,3,4]) |> LinkedList.to_list()
  [1,2,3,4]
  iex>LinkedList.from_list([2,5,8, 1]) |> LinkedList.to_list()
  [2,5,8,1]
  """
  def from_list(list) do
    Enum.reduce(list, new(), fn element, acc -> add(acc, element) end)
  end

  @doc """
  Add a new node to end of linked list
  #Example
  iex> LinkedList.new()\
  |> LinkedList.add(1)\
  |> LinkedList.to_list()
  [1]
  iex> LinkedList.new()\
  |> LinkedList.add(1)\
  |> LinkedList.add(2)\
  |> LinkedList.to_list()
  [1, 2]
  """
  def add(%__MODULE__{head: nil}, value) do
    %__MODULE__{head: LinkNode.new(value), size: 1}
  end

  def add(%__MODULE__{head: head, size: size} = list, value) do
    %__MODULE__{list | head: add_with_node(head, value), size: size + 1}
  end

  @doc """
  Deletes a node at given index from linked list.

  Returns `{deleted_node, list}`. When the index is out of range (negative,
  or at/past the end) the list is returned unchanged with a `nil` node.
  (Fix: the original crashed on `nil.next` for indexes past the end, and
  wrongly decremented `size` for negative indexes where nothing was deleted.)
  #Example
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> {_, l} = LinkedList.delete_at(l,0)
  iex> LinkedList.to_list(l)
  [2,3,4,5]
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> {_, l} = LinkedList.delete_at(l,1)
  iex> LinkedList.to_list(l)
  [1,3,4,5]
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> {_, l} = LinkedList.delete_at(l,2)
  iex> LinkedList.to_list(l)
  [1,2,4,5]
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> {_, l} = LinkedList.delete_at(l,3)
  iex> LinkedList.to_list(l)
  [1,2,3,5]
  """
  def delete_at(%__MODULE__{head: nil} = list, _index) do
    {nil, list}
  end

  def delete_at(%__MODULE__{head: %LinkNode{next: next} = head, size: size} = list, 0) do
    {head, %{list | head: next, size: size - 1}}
  end

  def delete_at(%__MODULE__{head: head, size: size} = list, index) do
    case delete_with_node_at(head, index - 1) do
      # Nothing was deleted: keep the list (and its size) untouched.
      {nil, _head} -> {nil, list}
      {node, new_head} -> {node, %{list | head: new_head, size: size - 1}}
    end
  end

  @doc """
  Insert value at the given index in the list
  #Example
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> l = LinkedList.insert_at(l, 0, 6)
  iex> LinkedList.to_list(l)
  [6,1,2,3,4,5]
  iex> l = LinkedList.from_list([1,2,3,4,5])
  iex> l = LinkedList.insert_at(l, 2, 6)
  iex> LinkedList.to_list(l)
  [1,2,6,3,4,5]
  iex> l = LinkedList.new()
  iex> l = LinkedList.insert_at(l, 2, 6)
  iex> LinkedList.to_list(l)
  [6]
  """
  def insert_at(%__MODULE__{head: nil} = list, _index, value) do
    add(list, value)
  end

  def insert_at(%__MODULE__{head: head, size: size} = list, index, value) when index <= 0 do
    %__MODULE__{list | head: LinkNode.new(value, head), size: size + 1}
  end

  def insert_at(%__MODULE__{head: head, size: size} = list, index, value) do
    %__MODULE__{list | head: insert_with_node_at(head, index - 1, value), size: size + 1}
  end

  @doc """
  Reverse elements in linked list
  #Example
  iex> l = LinkedList.from_list([1])
  iex> l = LinkedList.reverse(l)
  iex> LinkedList.to_list(l)
  [1]
  iex> l = LinkedList.from_list([1,2])
  iex> l = LinkedList.reverse(l)
  iex> LinkedList.to_list(l)
  [2,1]
  iex> l = LinkedList.from_list([1,2,3,4])
  iex> l = LinkedList.reverse(l)
  iex> LinkedList.to_list(l)
  [4,3,2,1]
  """
  def reverse(%__MODULE__{head: nil} = list), do: list

  def reverse(%__MODULE__{head: head} = list) do
    %{list | head: reverse_from_node(head, nil)}
  end

  @doc "Returns the list values as a plain Elixir list, head first."
  def to_list(%__MODULE__{head: head}) do
    to_list(head, [])
  end

  @doc "Prints the list contents via `IO.inspect/1` and returns them."
  def print_all(%__MODULE__{} = list) do
    IO.inspect(to_list(list))
  end

  # PRIVATE FUNCTIONS

  # Walks the chain, re-pointing each node at the already-reversed prefix.
  defp reverse_from_node(nil, reversed), do: reversed

  defp reverse_from_node(%LinkNode{next: next} = node, reversed) do
    reverse_from_node(next, %{node | next: reversed})
  end

  # Negative target index: nothing to delete, chain untouched.
  defp delete_with_node_at(%LinkNode{} = node, index) when index < 0, do: {nil, node}

  # Ran off the end of the chain before reaching the index: nothing deleted.
  defp delete_with_node_at(%LinkNode{next: nil} = node, _index), do: {nil, node}

  # `index` counts from the node *before* the one being removed.
  defp delete_with_node_at(%LinkNode{next: %LinkNode{} = next} = node, 0) do
    {next, %{node | next: next.next}}
  end

  defp delete_with_node_at(%LinkNode{next: next} = node, index) do
    {deleted, new_next} = delete_with_node_at(next, index - 1)
    {deleted, %{node | next: new_next}}
  end

  # Insertion past the end appends at the tail.
  defp insert_with_node_at(%LinkNode{next: nil} = node, _index, value) do
    %LinkNode{node | next: LinkNode.new(value)}
  end

  defp insert_with_node_at(%LinkNode{next: next} = node, 0, value) do
    %LinkNode{node | next: LinkNode.new(value, next)}
  end

  defp insert_with_node_at(%LinkNode{next: next} = node, index, value) do
    %LinkNode{node | next: insert_with_node_at(next, index - 1, value)}
  end

  # Appends `value` at the tail of the chain rooted at `node`.
  defp add_with_node(%LinkNode{next: nil} = node, value) do
    %LinkNode{node | next: LinkNode.new(value)}
  end

  defp add_with_node(%LinkNode{next: next} = node, value) do
    %LinkNode{node | next: add_with_node(next, value)}
  end

  defp to_list(nil, acc), do: :lists.reverse(acc)

  defp to_list(%LinkNode{value: value, next: next}, acc) do
    to_list(next, [value | acc])
  end
end
|
lib/linked_list.ex
| 0.835752
| 0.406626
|
linked_list.ex
|
starcoder
|
defmodule WordsWithEnemies.WordFinder do
  @moduledoc """
  Uses the Stream API to efficiently search a wordlist
  for words based on certain criteria. Each function takes
  and returns a stream, allowing for easy chaining. When the
  chain is complete, a function in the `Enum` module must be
  used to retrieve the words. For example:
      word_list
      |> using("oeznltfkqeihydctv")
      |> between(min: 8, max: 12)
      |> starting_with("h")
      |> Enum.take(1)
      ["helideck"]
  """

  alias WordsWithEnemies.Letters

  @wordlist "priv/static/sowpods.txt"

  @doc """
  Transforms the wordlist into a list, and stores
  it in an agent for efficient access later on.
  """
  def start_link do
    words =
      @wordlist
      |> File.read!()
      |> String.split("\n", trim: true)

    Agent.start_link(fn -> words end, name: __MODULE__)
  end

  @doc """
  Returns all words in the word list.
  """
  @spec word_list :: list
  def word_list, do: Agent.get(__MODULE__, & &1)

  @doc """
  Returns a stream of `words` that can be made from `letters`.
  """
  @spec using(Enumerable.t(), list | String.t()) :: Enumerable.t()
  def using(words, letters) when is_list(letters) do
    Stream.filter(words, &possible?(&1, letters))
  end

  def using(words, letters) when is_bitstring(letters) do
    using(words, String.codepoints(letters))
  end

  # A word is possible when every letter can be drawn (without reuse) from
  # the pool. Fix: the original length guard called `length/1` on a binary,
  # which always fails in a guard, so that clause silently never matched;
  # compare codepoint counts instead for the short-circuit.
  defp possible?(word, letters) when is_bitstring(word) do
    chars = String.codepoints(word)
    length(chars) <= length(letters) and do_possible?(chars, letters)
  end

  defp do_possible?([], _letters), do: true

  defp do_possible?([letter | others], letters) do
    if letter in letters do
      # Consume the matched letter so it cannot be reused.
      do_possible?(others, List.delete(letters, letter))
    else
      false
    end
  end

  @doc """
  Returns a stream of `words` that are between `min`
  and `max` characters in length.
  """
  @spec between(Enumerable.t(), list) :: Enumerable.t()
  def between(words, min: min) do
    Stream.filter(words, &min_length?(&1, min))
  end

  def between(words, max: max) do
    Stream.filter(words, &max_length?(&1, max))
  end

  def between(words, min: min, max: max) do
    Stream.filter(words, &length_between?(&1, min, max))
  end

  defp min_length?(word, min), do: String.length(word) >= min

  defp max_length?(word, max), do: String.length(word) <= max

  defp length_between?(word, min, max) do
    min_length?(word, min) and max_length?(word, max)
  end

  @doc """
  Returns a stream of `words` that meet the criteria in
  `constraints`. This is a map containing letters and
  the amount they should appear in the word. For example,
  `%{a: 4, k: 2}` will return all words with exactly 4 a's and 2 k's.
  """
  @spec containing(Enumerable.t(), map, list) :: Enumerable.t()
  def containing(words, constraints, opts \\ []) do
    opts = Keyword.put_new(opts, :precise, true)
    Stream.filter(words, &contains?(&1, constraints, opts))
  end

  # Fix: read `:precise` with `Keyword.get/3` so extra options no longer
  # crash the clause match, and drop the unused `word_keys` binding.
  defp contains?(word, constraints, opts) do
    precise = Keyword.get(opts, :precise, true)
    word_freqs = Letters.frequency_table(word)
    constraints = atom_keys_to_string(constraints)

    has_keys?(word_freqs, constraints) and
      constraints
      |> Map.keys()
      |> Enum.all?(&do_contains?(&1, word_freqs, constraints, precise))
  end

  # precise: the letter must appear exactly the requested number of times.
  defp do_contains?(char, word_freqs, constraints, true) do
    word_freqs[char] == constraints[char]
  end

  # non-precise: at least the requested number of times.
  defp do_contains?(char, word_freqs, constraints, false) do
    word_freqs[char] >= constraints[char]
  end

  defp has_keys?(checking, keys) do
    keys
    |> Map.keys()
    |> Enum.all?(&(&1 in Map.keys(checking)))
  end

  defp atom_keys_to_string(map) do
    for {key, val} <- map, into: %{} do
      {Atom.to_string(key), val}
    end
  end

  @doc """
  Returns a stream of `words` that begin with `prefix`.
  """
  @spec starting_with(Enumerable.t(), String.t() | [String.t()]) :: Enumerable.t()
  def starting_with(words, prefix) do
    Stream.filter(words, &String.starts_with?(&1, prefix))
  end

  @doc """
  Returns a stream of `words` that end with `suffix`.
  """
  @spec ending_with(Enumerable.t(), String.t() | [String.t()]) :: Enumerable.t()
  def ending_with(words, suffix) do
    Stream.filter(words, &String.ends_with?(&1, suffix))
  end

  @doc "Returns `true` if `word` is valid."
  @spec valid?(String.t()) :: boolean
  def valid?(word), do: word in word_list()

  @doc """
  Returns a stream of `words` whose `similarity/2` against `compare_word`
  is at least `min_similarity`.
  """
  @spec similar_to(Enumerable.t(), String.t(), number) :: Enumerable.t()
  def similar_to(words, compare_word, min_similarity) do
    Stream.filter(words, fn word ->
      similarity(word, compare_word) >= min_similarity
    end)
  end

  @doc """
  Returns the similarity of two words as a value from 0 to 1: the
  proportion of the shorter word's letters that the two words share
  (1:1, without reuse).

  ## Examples

      iex> WordsWithEnemies.WordFinder.similarity "pineapple", "apple"
      1.0

      iex> WordsWithEnemies.WordFinder.similarity "cozy", "fullers"
      0.0
  """
  # NOTE(review): the original `do_similarity` was truncated (a bodiless
  # `defp do_similarity` head made the module fail to compile). This
  # reconstruction matches the documented examples; confirm against the
  # project's intended scoring before release.
  @spec similarity(String.t(), String.t()) :: float
  def similarity(a, b) do
    {rest_a, _rest_b} = compare_words(a, b)
    matched = String.length(a) - length(rest_a)
    shorter = min(String.length(a), String.length(b))
    do_similarity(matched, shorter)
  end

  defp do_similarity(_matched, 0), do: 0.0
  defp do_similarity(matched, shorter), do: matched / shorter

  @doc ~S"""
  Compares the contents of two words and removes any letters shared
  between them on a 1:1 basis, returning the leftover letters of each.

  ## Examples

      iex> WordsWithEnemies.WordFinder.compare_words "pineapple", "planet"
      {["i", "p", "p", "e"], ["t"]}

      iex> WordsWithEnemies.WordFinder.compare_words "pineapple", "apple"
      {["i", "n", "p", "e"], []}
  """
  @spec compare_words(String.t() | list, String.t() | list) :: {list, list}
  def compare_words(a, b) when is_bitstring(a) and is_bitstring(b) do
    compare_words(String.codepoints(a), String.codepoints(b))
  end

  def compare_words(word_a, word_b) do
    do_compare_words(word_a, word_a, word_b)
  end

  defp do_compare_words([], word_a, word_b), do: {word_a, word_b}

  defp do_compare_words([current | others], word_a, word_b) do
    if current in word_a and current in word_b do
      do_compare_words(others, List.delete(word_a, current), List.delete(word_b, current))
    else
      do_compare_words(others, word_a, word_b)
    end
  end
end
|
lib/words_with_enemies/language/word_finder.ex
| 0.915432
| 0.63775
|
word_finder.ex
|
starcoder
|
defmodule Loom.MVRegister do
  @moduledoc """
  A causally consistent multi-value register.
  A bit more causally rigorous than LWWRegister, MVRegister will replace
  observed values when they are set, but concurrent additions will co-occur,
  and a list will be returned for the value.
  This is good if you have some reasonable way of figuring out how to resolve
  this further down your app keychain (including user resolution), but you can't
  make it generic enough to work as a CRDT.
  """
  alias __MODULE__, as: Reg
  alias Loom.Dots
  @type actor :: term
  @type value :: term
  @type t :: %Reg{
    dots: Dots.t,
    keep_delta: boolean,
    delta: Dots.t | nil
  }
  # NOTE(review): `keep_delta` is never read in this module — `set/3` and
  # `empty/1` always track deltas. Confirm whether the flag is consulted
  # elsewhere before relying on it.
  defstruct dots: %Dots{}, keep_delta: true, delta: nil
  @doc """
  Returns a new MVRegister CRDT.
  `nil` is a new CRDT's identity value, and by default the system time in
  microseconds is used as the clock value.
  iex> Loom.MVRegister.new |> Loom.MVRegister.value
  nil
  """
  @spec new :: t
  def new, do: %Reg{delta: Dots.new}
  @doc """
  Grab the delta from an MVRegister for lower-cost synchronization.
  iex> alias Loom.MVRegister, as: Reg
  iex> a = Reg.new |> Reg.set(:a, 5)
  iex> delta_b = Reg.new |> Reg.set(:b, 2) |> Reg.delta
  iex> Reg.join(a, delta_b) |> Reg.value |> Enum.sort
  [2,5]
  """
  @spec delta(t) :: t
  # The delta is exposed as a register in its own right so it can be joined.
  def delta(%Reg{delta: delta}), do: %Reg{dots: delta}
  @doc """
  Clear the delta from an MVRegister to preserve space. Do this after you sync
  "enough".
  iex> alias Loom.MVRegister, as: Reg
  iex> Reg.new |> Reg.set(:a, 5) |> Reg.clear_delta |> Reg.delta == Reg.new |> Reg.delta
  true
  """
  @spec clear_delta(t) :: t
  def clear_delta(%Reg{}=reg), do: %Reg{reg|delta: Dots.new}
  @doc """
  Sets a value, erasing any current values.
  iex> alias Loom.MVRegister, as: Reg
  iex> Reg.new
  ...> |> Reg.set(:a, "test")
  ...> |> Reg.set(:a, "test2")
  ...> |> Reg.value
  "test2"
  """
  @spec set(t, actor, term) :: t
  def set(%Reg{dots: d, delta: delta}=reg, actor, value) do
    # `Dots.remove/1` then `Dots.add/3` thread the `{dots, delta}` pair:
    # observed values are removed, then the new value is tagged with `actor`.
    # Both the main dot set and the delta are updated in one pass.
    {new_dots, new_delta_dots} = {d, delta}
    |> Dots.remove()
    |> Dots.add(actor, value)
    %Reg{reg|dots: new_dots, delta: new_delta_dots}
  end
  @doc """
  Clear out an MVRegister
  iex> alias Loom.MVRegister, as: Reg
  iex> Reg.new
  ...> |> Reg.set(:a, 1)
  ...> |> Reg.empty()
  ...> |> Reg.value
  nil
  """
  @spec empty(t) :: t
  def empty(%Reg{dots: d, delta: delta}=reg) do
    # Same tuple-threading as `set/3`, but with no replacement value added.
    {new_dots, new_delta_dots} = {d, delta} |> Dots.remove()
    %Reg{reg|dots: new_dots, delta: new_delta_dots}
  end
  @doc """
  Joins 2 MVRegisters
  iex> alias Loom.MVRegister, as: Reg
  iex> a = Reg.new |> Reg.set(:a, "test") |> Reg.set(:a, "test2")
  iex> b = Reg.new |> Reg.set(:b, "take over")
  iex> Reg.join(a, b) |> Reg.value
  ["test2", "take over"]
  """
  @spec join(t, t) :: t
  # Note: only the first register's delta/keep_delta survive the join.
  def join(%Reg{dots: d1}=reg, %Reg{dots: d2}) do
    %Reg{reg|dots: Dots.join(d1, d2)}
  end
  @doc """
  Returns the natural value of the register. If there is nothing, it's nil. If
  it's one thing, it's that value (this is the normal case). If it's more than
  one thing, all values are returned in a list.
  """
  @spec value(t) :: [term] | term | nil
  def value(%Reg{dots: d}) do
    # Collect the distinct surviving values; unwrap singletons for the
    # common non-concurrent case.
    values = (for {_, v} <- Dots.dots(d), do: v) |> Enum.uniq
    case values do
      [] -> nil
      [singleton] -> singleton
      mv -> mv
    end
  end
end
defimpl Loom.CRDT, for: Loom.MVRegister do
  alias Loom.MVRegister, as: Reg

  @doc """
  Returns a description of the operations that this CRDT takes.
  Updates return a new CRDT, reads can return any natural datatype. This register
  returns a value.
  """
  def ops(_crdt) do
    updates = [set: [:actor, :value]]
    reads = [value: []]
    [update: updates, read: reads]
  end

  @doc """
  Applies a CRDT to a counter in an abstract way.
  This is for ops-based support.
  iex> alias Loom.CRDT
  iex> alias Loom.MVRegister, as: Reg
  iex> reg = Reg.new |> CRDT.apply({:set, :a, "test"}) |> CRDT.apply({:set, :a, "testing"})
  iex> CRDT.apply(reg, :value)
  "testing"
  """
  def apply(register, {:set, actor, value}), do: Reg.set(register, actor, value)
  def apply(register, :value), do: Reg.value(register)

  @doc """
  Joins 2 CRDT's of the same type.
  2 different types cannot mix (yet). In the future, we may be able to join
  different counters and merge their semantics, as long as the datatype grows
  monotonically.
  iex> alias Loom.CRDT
  iex> a = Loom.MVRegister.new |> CRDT.apply({:set, :a, "test"})
  iex> b = Loom.MVRegister.new |> CRDT.apply({:set, :b, "test2"})
  iex> CRDT.join(a,b) |> CRDT.value |> Enum.sort
  ["test","test2"]
  """
  def join(a, b), do: Reg.join(a, b)

  @doc """
  Returns the most natural value for a counter, an integer.
  """
  def value(register), do: Reg.value(register)
end
|
lib/loom/mvregister.ex
| 0.839191
| 0.579936
|
mvregister.ex
|
starcoder
|
defmodule Plymio.Codi.Pattern.Delegate do
  @moduledoc ~S"""
  The *delegate* patterns build `Kernel.defdelegate/2` call(s).
  Delegated functions can be built with, optionally, with a `@doc`,
  `@since` and/or `@spec`.
  See `Plymio.Codi` for an overview and documentation terms
  Note the delegated mfa: `{module, function, arity}` is validated
  i.e. the `function` must exist in the `module` with the given
  `arity`.
  If `:delegate_doc` is not in the pattern opts, a default of
  `:delegate` is used. (It can be disabled by explicily setting
  `:fun_doc` to `nil` - **not** `false`).
  ## Pattern: *delegate*
  Valid keys in the *cpo* are:
  | Key | Aliases |
  | :--- | :--- |
  | `:delegate_module` | *:to, :module, :fun_mod, :fun_module, :function_module* |
  | `:delegate_name` | *:as* |
  | `:delegate_doc` | *:doc, :fun_doc, :function_doc* |
  | `:delegate_args` | *:args, :fun_args, :function_args* |
  | `:delegate_arity` | *:arity, :fun_arity, :function_arity* |
  | `:fun_name` | *:name, :function_name* |
  | `:spec_args` | |
  | `:spec_result` |*:result, :fun_result, :function_result* |
  | `:since` | |
  ## Examples
  A simple case. Note the automatically generated `:delegate`-format `@doc`.
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_one, arity: 1, module: ModuleA],
  ...> delegate: [name: :fun_due, arity: 2, module: ModuleA],
  ...> delegate: [name: :fun_tre, arity: 3, module: ModuleA]
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["@doc \"Delegated to `ModuleA.fun_one/1`\"",
  "defdelegate(fun_one(var1), to: ModuleA)",
  "@doc \"Delegated to `ModuleA.fun_due/2`\"",
  "defdelegate(fun_due(var1, var2), to: ModuleA)",
  "@doc \"Delegated to `ModuleA.fun_tre/3`\"",
  "defdelegate(fun_tre(var1, var2, var3), to: ModuleA)"]
  Here showing the auto-generated `@doc` disabled.
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_one, arity: 1, module: ModuleA, doc: nil],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["defdelegate(fun_one(var1), to: ModuleA)"]
  This example shows explicit function arguments (`:args`) being given:
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_one, args: :opts, module: ModuleA, doc: nil],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["defdelegate(fun_one(opts), to: ModuleA)"]
  Delegating to a different function name (`:as`):
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_3, as: :fun_tre, args: [:opts, :key, :value], module: ModuleA, doc: nil],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["defdelegate(fun_3(opts, key, value), to: ModuleA, as: :fun_tre)"]
  Here a `@doc`, `@since`, and `@spec` are generated. Note in the first
  example the `:spec_args` are explicily given as well as the
  `:spec_result`. In the second no `:spec_args` are given and the
  arity used.
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_one, arity: 1, module: ModuleA,
  ...> since: "1.7.9", spec_args: :integer, spec_result: :tuple],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["@doc \"Delegated to `ModuleA.fun_one/1`\"",
  "@since \"1.7.9\"",
  "@spec fun_one(integer) :: tuple",
  "defdelegate(fun_one(var1), to: ModuleA)"]
  iex> {:ok, {forms, _}} = [
  ...> delegate: [name: :fun_one, arity: 1, module: ModuleA,
  ...> since: "1.7.9", spec_result: :tuple],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["@doc \"Delegated to `ModuleA.fun_one/1`\"",
  "@since \"1.7.9\"",
  "@spec fun_one(any) :: tuple",
  "defdelegate(fun_one(var1), to: ModuleA)"]
  Showing validation of the `mfa`:
  iex> {:error, error} = [
  ...> delegate: [name: :fun_one, arity: 2, module: ModuleZ],
  ...> ] |> produce_codi
  ...> error |> Exception.message
  "mfa {ModuleZ, :fun_one, 2} module unknown"
  iex> {:error, error} = [
  ...> delegate: [name: :fun_1, arity: 2, module: ModuleA],
  ...> ] |> produce_codi
  ...> error |> Exception.message
  "mfa {ModuleA, :fun_1, 2} function unknown"
  iex> {:error, error} = [
  ...> delegate: [name: :fun_one, arity: 2, module: ModuleA],
  ...> ] |> produce_codi
  ...> error |> Exception.message
  "mfa {ModuleA, :fun_one, 2} arity unknown"
  ## Pattern: *delegate_module*
  The *delegate_module* pattern builds a delegate function
  for one or more functions in a module.
  As with `:delegate`, `@doc` and/or `@since` can be generated at the same time.
  Valid keys in the *cpo* are:
  | Key | Aliases |
  | :--- | :--- |
  | `:delegate_module` | *:to :module, :fun_module, :fun_mod, :function_module* |
  | `:delegate_doc` | *:doc, :fun_doc, :function_doc* |
  | `:take` | |
  | `:drop` | |
  | `:filter` | |
  | `:reject` | |
  | `:since` | |
  To determine which functions to delegate, the "function v arity"
  (*fva*) for the module is first obtained by calling e.g. `ModuleA.__info__(:functions)`.
  The *delegate options* can include `:take`, `:drop`, `:filter` or
  `:reject` keys to "edit" the *fva*..
  The first two take zero, one or more function names
  and are used in a call to e.g. `Keyword.take/2` with the *fva*.
  The second two keys require an arity 1 function (predicate) passed a
  `{fun,arity}` tuple, returning `true` or `false` and is used with e.g. `Enum.filter/2`.
  > Note the fva edits are applied in order of occurence so `:take`-ing a function already `:reject`-ed will do nothing.
  Here all functions in the module (`ModuleA`) are wanted with auto-generated `@doc` and `@since`:
  iex> {:ok, {forms, _}} = [
  ...> delegate_module: [module: ModuleA, since: "1.7.9"],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["@doc \"Delegated to `ModuleA.fun_due/2`\"",
  "@since \"1.7.9\"",
  "defdelegate(fun_due(var1, var2), to: ModuleA)",
  "@doc \"Delegated to `ModuleA.fun_one/1`\"",
  "@since \"1.7.9\"",
  "defdelegate(fun_one(var1), to: ModuleA)",
  "@doc \"Delegated to `ModuleA.fun_tre/3`\"",
  "@since \"1.7.9\"",
  "defdelegate(fun_tre(var1, var2, var3), to: ModuleA)"]
  Here arity 2 funs are selected, and `@doc` is disabled.
  iex> {:ok, {forms, _}} = [
  ...> delegate_module: [
  ...> module: ModuleA, doc: nil,
  ...> filter: fn {_fun,arity} -> arity == 3 end],
  ...> ] |> produce_codi
  ...> forms |> harnais_helper_format_forms!
  ["defdelegate(fun_tre(var1, var2, var3), to: ModuleA)"]
  """
  alias Plymio.Codi, as: CODI
  use Plymio.Fontais.Attribute
  use Plymio.Codi.Attribute
  import Plymio.Fontais.Option,
    only: [
      opts_canonical_keys: 2,
      opts_take_canonical_keys: 2,
      opts_create_aliases_dict: 1
    ]
  import Plymio.Codi.Utility,
    only: [
      cpo_resolve_fun_name: 1,
      cpo_resolve_delegate_module: 1,
      cpo_resolve_delegate_name: 1,
      cpo_resolve_delegate_doc: 1,
      cpo_resolve_delegate_args: 1
    ]
  import Plymio.Codi.Utility.Module,
    only: [
      reduce_module_fva: 2,
      state_validate_mfa: 2,
      state_resolve_module_fva: 2
    ]
  import Plymio.Funcio.Enum.Map.Collate,
    only: [
      map_collate0_enum: 2
    ]
  import Plymio.Codi.CPO
  # Alias spec for the `:delegate` pattern's cpo keys; the attribute values
  # come from `use Plymio.Codi.Attribute`.
  @pattern_delegate_kvs_alias [
    @plymio_codi_key_alias_pattern,
    @plymio_codi_key_alias_status,
    @plymio_codi_key_alias_form,
    @plymio_codi_key_alias_since,
    @plymio_codi_key_alias_fun_name,
    @plymio_codi_key_alias_delegate_doc,
    @plymio_codi_key_alias_delegate_module,
    @plymio_codi_key_alias_delegate_name,
    @plymio_codi_key_alias_delegate_args,
    @plymio_codi_key_alias_delegate_arity,
    # limited aliases
    {@plymio_codi_key_typespec_spec_args, [:spec_args]},
    @plymio_codi_key_alias_typespec_spec_result,
    @plymio_codi_key_alias_forms_edit
  ]
  @pattern_delegate_dict_alias @pattern_delegate_kvs_alias
                               |> opts_create_aliases_dict
  @doc false
  # Normalises a `:delegate` cpo: keeps only known keys, mapping aliases to
  # their canonical names.
  def cpo_pattern_delegate_normalise(opts, dict \\ nil) do
    opts |> opts_take_canonical_keys(dict || @pattern_delegate_dict_alias)
  end
  # Alias spec for the `:delegate_module` pattern's cpo keys.
  @pattern_delegate_module_kvs_alias [
    @plymio_codi_key_alias_pattern,
    @plymio_codi_key_alias_status,
    @plymio_codi_key_alias_form,
    @plymio_codi_key_alias_since,
    @plymio_codi_key_alias_delegate_module,
    @plymio_codi_key_alias_delegate_doc,
    {@plymio_codi_key_take, nil},
    {@plymio_codi_key_drop, nil},
    {@plymio_codi_key_filter, nil},
    {@plymio_codi_key_reject, nil},
    @plymio_codi_key_alias_forms_edit
  ]
  @pattern_delegate_module_dict_alias @pattern_delegate_module_kvs_alias
                                      |> opts_create_aliases_dict
  @doc false
  # Normalises a `:delegate_module` cpo. Unlike the `:delegate` normaliser,
  # unknown keys are an error here (`opts_canonical_keys/2`).
  def cpo_pattern_delegate_module_normalise(opts, dict \\ nil) do
    opts |> opts_canonical_keys(dict || @pattern_delegate_module_dict_alias)
  end
  @doc false
  # Expresses a single `:delegate` pattern: resolves the target mfa,
  # validates it, builds the `defdelegate` form, and queues dependent
  # doc / since / spec cpos to be produced alongside it.
  def express_pattern(%CODI{} = state, pattern, cpo)
      when pattern == @plymio_codi_pattern_delegate do
    with {:ok, cpo} <- cpo |> cpo_pattern_delegate_normalise,
         {:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
         {:ok, delegate_module} <- cpo |> cpo_resolve_delegate_module,
         # default the delegate name to the local function name
         {:ok, cpo} <- cpo |> cpo_maybe_put_delegate_name(fun_name),
         {:ok, delegate_name} <- cpo |> cpo_resolve_delegate_name,
         {:ok, delegate_args} <- cpo |> cpo_resolve_delegate_args,
         {:ok, cpo} <- cpo |> cpo_maybe_put_delegate_doc(@plymio_codi_doc_type_delegate),
         {:ok, delegate_doc} <- cpo |> cpo_resolve_delegate_doc,
         # confirm the target module exports delegate_name/arity
         {:ok, {_, %CODI{} = state}} <-
           state |> state_validate_mfa({delegate_module, delegate_name, length(delegate_args)}),
         # base dependent cpo
         {:ok, depend_cpo} <- cpo |> cpo_mark_status_active,
         {:ok, depend_cpo} <- depend_cpo |> cpo_put_fun_module(delegate_module),
         # the dependent doc cpo
         {:ok, depend_doc_cpo} <- depend_cpo |> cpo_put_pattern(@plymio_codi_pattern_doc),
         {:ok, depend_doc_cpo} <- depend_doc_cpo |> cpo_put_fun_doc(delegate_doc),
         {:ok, depend_doc_cpo} <- depend_doc_cpo |> cpo_put_fun_args(delegate_args),
         # the dependent since cpo
         {:ok, depend_since_cpo} <- depend_cpo |> cpo_put_pattern(@plymio_codi_pattern_since),
         # the dependent type cpo
         {:ok, depend_type_cpo} <-
           depend_cpo |> cpo_put_pattern(@plymio_codi_pattern_typespec_spec),
         {:ok, depend_type_cpo} <-
           depend_type_cpo
           |> cpo_maybe_add_typespec_spec_opts([
             {@plymio_codi_key_typespec_spec_arity, delegate_args |> length}
           ]),
         true <- true do
      # only emit `:as` when delegating under a different name
      delegate_opts =
        (delegate_name == fun_name)
        |> case do
          true ->
            [to: delegate_module]
          _ ->
            [to: delegate_module, as: delegate_name]
        end
      pattern_form =
        quote do
          defdelegate unquote(fun_name)(unquote_splicing(delegate_args)), unquote(delegate_opts)
        end
      depend_patterns = [
        depend_doc_cpo,
        depend_since_cpo,
        depend_type_cpo
      ]
      # normalise the dependent cpos through a fresh pipeline pass, then
      # append the finished defdelegate cpo after them
      with {:ok, %CODI{} = depend_state} <- state |> CODI.update_snippets(depend_patterns),
           {:ok, {depend_product, %CODI{}}} <-
             depend_state |> Plymio.Codi.Stage.Normalise.normalise_snippets(),
           {:ok, depend_cpos} <- depend_product |> cpo_fetch_patterns,
           {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form) do
        cpos = depend_cpos ++ [cpo]
        {:ok, {cpos, state}}
      else
        {:error, %{__exception__: true}} = result -> result
      end
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end
  # Expresses a `:delegate_module` pattern: resolves the module's
  # function/arity list, applies the take/drop/filter/reject edits, and
  # fans out into one `:delegate` cpo per surviving function.
  def express_pattern(%CODI{} = state, pattern, opts)
      when pattern == @plymio_codi_pattern_delegate_module do
    with {:ok, opts} <- opts |> cpo_pattern_delegate_module_normalise,
         {:ok, delegate_module} <- opts |> cpo_fetch_delegate_module,
         {:ok, {delegate_fva, %CODI{} = state}} <-
           state |> state_resolve_module_fva(delegate_module),
         {:ok, delegate_fva} <- delegate_fva |> reduce_module_fva(opts),
         {:ok, delegate_cpo} <- opts |> cpo_pattern_delegate_normalise,
         {:ok, delegate_cpo} <- delegate_cpo |> cpo_mark_status_active,
         {:ok, delegate_cpo} <- delegate_cpo |> cpo_put_pattern(@plymio_codi_pattern_delegate) do
      delegate_fva
      |> map_collate0_enum(fn {name, arity} ->
        with {:ok, cpo} <- delegate_cpo |> cpo_put_fun_name(name),
             {:ok, cpo} <- cpo |> cpo_put_delegate_name(name),
             {:ok, _cpo} = result <- cpo |> cpo_put_delegate_arity(arity) do
          result
        else
          {:error, %{__exception__: true}} = result -> result
        end
      end)
      |> case do
        {:error, %{__struct__: _}} = result -> result
        {:ok, cpos} -> {:ok, {cpos, state}}
      end
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end
end
|
lib/codi/pattern/delegate/delegate.ex
| 0.89769
| 0.671349
|
delegate.ex
|
starcoder
|
defmodule Cluster.Strategy.Consul do
  @moduledoc """
  This clustering strategy is specific to the Consul service networking
  solution. It works by querying the platform's metadata API for containers
  belonging to a given service name and attempts to connect them
  (see: https://www.consul.io/api/catalog.html).

  There is also the option to require connecting to nodes from different
  datacenters, or you can stick to a single datacenter.

  It assumes that all nodes share a base name and are using longnames of the
  form `<basename>@<ip>` where the `<ip>` is unique for each node.

  The Consul service registration isn't part of this module as there are many
  different ways to accomplish that, so it is assumed you'll do that from
  another part of your application.

  An example configuration is below:

      config :libcluster,
        topologies: [
          consul_example: [
            strategy: #{__MODULE__},
            config: [
              # The base agent URL.
              base_url: "http://consul.service.dc1.consul:8500",

              # If authentication is needed, set the access token here.
              access_token: "<your consul ACL token>",

              # Nodes list will be refreshed using Consul on each interval.
              polling_interval: 10_000,

              # The Consul endpoints used to fetch service nodes.
              list_using: [
                # If you want to use the Agent HTTP API as specified in
                # https://www.consul.io/api/agent.html
                Cluster.Strategy.Consul.Agent,

                # If you want to use the Health HTTP Endpoint as specified in
                # https://www.consul.io/api/health.html
                {Cluster.Strategy.Consul.Health, [passing: true]},

                # If you want to use the Catalog HTTP API as specified in
                # https://www.consul.io/api/catalog.html
                Cluster.Strategy.Consul.Catalog,

                # If you want to join nodes from multiple datacenters, do:
                {Cluster.Strategy.Consul.Multisite, [
                  datacenters: ["dc1", "dc2", "dc3", ...],
                  endpoints: [
                    ... further endpoints ...
                  ]
                ]},

                # You can also list all datacenters:
                {Cluster.Strategy.Consul.Multisite, [
                  datacenters: :all,
                  endpoints: [
                    ... further endpoints ...
                  ]
                ]},
              ]

              # All configurations below are defined as default for all
              # children endpoints.

              # Datacenter parameter while querying.
              dc: "dc1",

              # The default service_name for children endpoints specifications.
              service_name: "my-service",

              # This is the node basename, the Name (first) part of an Erlang
              # node name (before the @ part. If not specified, it will assume
              # the same name as the current running node.
              node_basename: "app_name",

              # This is the EEx template used to build the node names. The
              # variables `ip`, `dc` and `node_basename` are available to
              # compose the node name.
              node_name_template: "<%= node_basename %>@<%= ip %>"
            ]]]
  """
  use GenServer
  use Cluster.Strategy

  alias Cluster.Strategy.State

  # Endpoint modules (Agent/Health/Catalog/Multisite) implement this callback
  # to return the discovered node names for the given strategy state.
  @callback get_nodes(%State{}) :: [atom()]

  @default_polling_interval 5_000
  @default_base_url "http://localhost:8500"
  @default_node_name_template "<%= node_basename %>@<%= ip %>"

  def start_link(args), do: GenServer.start_link(__MODULE__, args)

  @impl true
  # First init pass: seed `meta` (the set of currently-known nodes) with an
  # empty MapSet, then fall through to the main init clause.
  def init([%State{meta: nil} = state]), do: init([%State{state | :meta => MapSet.new()}])

  def init([%State{config: config} = state]) do
    state =
      case Keyword.get(config, :node_basename) do
        nil ->
          # Default to the Name part of the current node ("name@host").
          [node_basename, _] =
            node()
            |> to_string()
            |> String.split("@")

          %{state | config: Keyword.put(config, :node_basename, node_basename)}

        app_name when is_binary(app_name) and app_name != "" ->
          state

        app_name ->
          raise ArgumentError,
                "Consul strategy is selected, but :node_basename" <>
                  " is invalid, got: #{inspect(app_name)}"
      end

    # Timeout 0 makes the first :timeout message (and thus the first poll)
    # fire immediately after init.
    {:ok, state, 0}
  end

  @impl true
  # Each poll: reconcile cluster membership, then re-arm the poll timeout.
  def handle_info(:timeout, state), do: {:noreply, load(state), polling_interval(state)}

  # Fetches the current node list from Consul and reconciles it against the
  # previously-known set in `state.meta`: disconnects nodes that vanished and
  # connects newly-discovered ones, keeping `meta` consistent with the nodes
  # we actually managed to (dis)connect.
  defp load(
         %State{
           topology: topology,
           connect: connect,
           disconnect: disconnect,
           list_nodes: list_nodes
         } = state
       ) do
    new_nodelist = MapSet.new(get_nodes(state))
    removed = MapSet.difference(state.meta, new_nodelist)

    new_nodelist =
      case Cluster.Strategy.disconnect_nodes(
             topology,
             disconnect,
             list_nodes,
             MapSet.to_list(removed)
           ) do
        :ok ->
          new_nodelist

        {:error, bad_nodes} ->
          # Add back the nodes which should have been removed, but which couldn't be for some reason
          Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
            MapSet.put(acc, n)
          end)
      end

    new_nodelist =
      case Cluster.Strategy.connect_nodes(
             topology,
             connect,
             list_nodes,
             MapSet.to_list(new_nodelist)
           ) do
        :ok ->
          new_nodelist

        {:error, bad_nodes} ->
          # Remove the nodes which should have been added, but couldn't be for some reason
          Enum.reduce(bad_nodes, new_nodelist, fn {n, _}, acc ->
            MapSet.delete(acc, n)
          end)
      end

    %{state | meta: new_nodelist}
  end

  # Delegates discovery to every endpoint in :list_using; an `{endpoint, opts}`
  # entry has its opts merged over the shared config before the call.
  def get_nodes(%State{config: config} = state) do
    config
    |> Keyword.fetch!(:list_using)
    |> Enum.flat_map(fn
      {endpoint, opts} ->
        endpoint.get_nodes(%{state | config: Keyword.merge(config, opts)})

      endpoint ->
        endpoint.get_nodes(state)
    end)
  end

  defp polling_interval(%{config: config}) do
    Keyword.get(config, :polling_interval, @default_polling_interval)
  end

  # Builds the Consul base URL, folding the optional :dc config into the
  # URL's query string (merged with any query params already present).
  def base_url(config) do
    base_url =
      config
      |> Keyword.get(:base_url, @default_base_url)
      |> URI.parse()

    case Keyword.get(config, :dc) do
      nil ->
        base_url

      dc ->
        query =
          (base_url.query || "")
          |> URI.decode_query(%{"dc" => dc})
          |> URI.encode_query()

        %{base_url | query: query}
    end
  end

  # HTTP headers for Consul requests; adds a bearer token when configured.
  def headers(config) do
    case Keyword.get(config, :access_token) do
      nil ->
        []

      access_token ->
        [{"authorization", "Bearer #{access_token}"}]
    end
  end

  # Renders the node name atom from the EEx :node_name_template using the
  # discovered `ip`, the configured `dc` and the (required) `node_basename`.
  def node_name(ip, config) do
    template = Keyword.get(config, :node_name_template, @default_node_name_template)

    opts = [
      ip: ip,
      dc: Keyword.get(config, :dc),
      node_basename: Keyword.fetch!(config, :node_basename)
    ]

    :"#{EEx.eval_string(template, opts)}"
  end
end
|
lib/strategy/consul.ex
| 0.847385
| 0.498413
|
consul.ex
|
starcoder
|
defmodule Estated.Property.Structure do
  @moduledoc "Information about the building(s) on the parcel."
  @moduledoc since: "0.2.0"

  alias Estated.Property.Structure.OtherArea
  alias Estated.Property.Structure.OtherFeature
  alias Estated.Property.Structure.OtherImprovement

  defstruct year_built: nil,
            effective_year_built: nil,
            stories: nil,
            rooms_count: nil,
            beds_count: nil,
            baths: nil,
            partial_baths_count: nil,
            units_count: nil,
            parking_type: nil,
            parking_spaces_count: nil,
            pool_type: nil,
            architecture_type: nil,
            construction_type: nil,
            exterior_wall_type: nil,
            foundation_type: nil,
            roof_material_type: nil,
            roof_style_type: nil,
            heating_type: nil,
            heating_fuel_type: nil,
            air_conditioning_type: nil,
            fireplaces: nil,
            basement_type: nil,
            quality: nil,
            condition: nil,
            flooring_types: [],
            plumbing_fixtures_count: nil,
            interior_wall_type: nil,
            water_type: nil,
            sewer_type: nil,
            total_area_sq_ft: nil,
            other_areas: [],
            other_features: [],
            other_improvements: [],
            other_rooms: [],
            amenities: []

  @typedoc "Information about the building(s) on the parcel."
  @typedoc since: "0.2.0"
  @type t :: %__MODULE__{
          year_built: year_built() | nil,
          effective_year_built: effective_year_built() | nil,
          stories: stories() | nil,
          rooms_count: rooms_count() | nil,
          beds_count: beds_count() | nil,
          baths: baths() | nil,
          partial_baths_count: partial_baths_count() | nil,
          units_count: units_count() | nil,
          parking_type: parking_type() | nil,
          parking_spaces_count: parking_spaces_count() | nil,
          pool_type: pool_type() | nil,
          architecture_type: architecture_type() | nil,
          construction_type: construction_type() | nil,
          exterior_wall_type: exterior_wall_type() | nil,
          foundation_type: foundation_type() | nil,
          roof_material_type: roof_material_type() | nil,
          roof_style_type: roof_style_type() | nil,
          heating_type: heating_type() | nil,
          heating_fuel_type: heating_fuel_type() | nil,
          air_conditioning_type: air_conditioning_type() | nil,
          fireplaces: fireplaces() | nil,
          basement_type: basement_type() | nil,
          quality: quality() | nil,
          condition: condition() | nil,
          flooring_types: flooring_types() | nil,
          plumbing_fixtures_count: plumbing_fixtures_count() | nil,
          interior_wall_type: interior_wall_type() | nil,
          water_type: water_type() | nil,
          sewer_type: sewer_type() | nil,
          total_area_sq_ft: total_area_sq_ft() | nil,
          other_areas: [OtherArea.t()],
          other_features: [OtherFeature.t()],
          other_improvements: [OtherImprovement.t()],
          other_rooms: other_rooms(),
          amenities: amenities()
        }

  @typedoc """
  The year the structure was built.

  Eg. **1923**
  """
  @typedoc since: "0.2.0"
  @type year_built :: integer()

  @typedoc """
  The year a structure was substantially updated or improved.

  Eg. **1996**
  """
  @typedoc since: "0.2.0"
  @type effective_year_built :: integer()

  @typedoc """
  The number of stories comprising the structure (may include fractional stories and alphabetic
  codes).

  Eg. **2[+A](https://estated.com/developers/docs/v4/property/enum-overview#stories)**
  """
  @typedoc since: "0.2.0"
  @type stories :: String.t()

  @typedoc """
  The total number of rooms in the building (not just bedrooms).

  Eg. **4**
  """
  @typedoc since: "0.2.0"
  @type rooms_count :: integer()

  @typedoc """
  The number of bedrooms in the building.

  Eg. **3**
  """
  @typedoc since: "0.2.0"
  @type beds_count :: integer()

  @typedoc """
  The total number of bathrooms in the building, including half baths.

  Eg. **2.5**
  """
  @typedoc since: "0.2.0"
  @type baths :: float()

  @typedoc """
  The number of partial bathrooms in the building, when available.

  Eg. **2**
  """
  @typedoc since: "0.2.0"
  @type partial_baths_count :: integer()

  @typedoc """
  Total number of units reported to the county.

  Eg. **1**
  """
  @typedoc since: "0.2.0"
  @type units_count :: integer()

  @typedoc """
  The type of parking available.

  Eg. [**DETACHED GARAGE**](https://estated.com/developers/docs/v4/property/enum-overview#parking_type)
  """
  @typedoc since: "0.2.0"
  @type parking_type :: String.t()

  @typedoc """
  The total number of available parking spaces; including garage, carport, driveway.

  Eg. **2**
  """
  @typedoc since: "0.2.0"
  @type parking_spaces_count :: integer()

  @typedoc """
  Type of pool located on the property - shared or private.

  Eg. [**HEATED POOL**](https://estated.com/developers/docs/v4/property/enum-overview#pool_type)
  """
  @typedoc since: "0.2.0"
  @type pool_type :: String.t()

  @typedoc """
  Style or historical period of the primary structure.

  Eg. [**SPANISH**](https://estated.com/developers/docs/v4/property/enum-overview#architecture_type)
  """
  @typedoc since: "0.2.0"
  @type architecture_type :: String.t()

  @typedoc """
  Type of material used in construction of the building.

  Eg. [**WOOD**](https://estated.com/developers/docs/v4/property/enum-overview#construction_type)
  """
  @typedoc since: "0.2.0"
  @type construction_type :: String.t()

  @typedoc """
  Material used for the exterior walls of the building.

  Eg. [**STUCCO**](https://estated.com/developers/docs/v4/property/enum-overview#exterior_wall_type)
  """
  @typedoc since: "0.2.0"
  @type exterior_wall_type :: String.t()

  @typedoc """
  The type of material used in the foundation.

  Eg. [**CONCRETE**](https://estated.com/developers/docs/v4/property/enum-overview#foundation_type)
  """
  @typedoc since: "0.2.0"
  @type foundation_type :: String.t()

  @typedoc """
  The material used for the roof of the building.

  Eg. [**ASPHALT**](https://estated.com/developers/docs/v4/property/enum-overview#roof_material_type)
  """
  @typedoc since: "0.2.0"
  @type roof_material_type :: String.t()

  @typedoc """
  The architectural style for the roof of the building.

  Eg. [**GABLE**](https://estated.com/developers/docs/v4/property/enum-overview#roof_style_type)
  """
  @typedoc since: "0.2.0"
  @type roof_style_type :: String.t()

  @typedoc """
  Primary heating type for the building.

  Eg. [**BASEBOARD**](https://estated.com/developers/docs/v4/property/enum-overview#heating_type)
  """
  @typedoc since: "0.2.0"
  @type heating_type :: String.t()

  @typedoc """
  Type of fuel used to heat the building.

  Eg. [**GAS**](https://estated.com/developers/docs/v4/property/enum-overview#heating_fuel_type)
  """
  @typedoc since: "0.2.0"
  @type heating_fuel_type :: String.t()

  @typedoc """
  Air conditioning type for the building.

  Eg. [**CENTRAL**](https://estated.com/developers/docs/v4/property/enum-overview#air_conditioning_type)
  """
  @typedoc since: "0.2.0"
  @type air_conditioning_type :: String.t()

  @typedoc """
  Total number of fireplaces in the building (can also be "YES")

  Eg. **2**
  """
  @typedoc since: "0.2.0"
  @type fireplaces :: String.t()

  @typedoc """
  Basement type for the building.

  Eg. [**FULL BASEMENT**](https://estated.com/developers/docs/v4/property/enum-overview#basement_type)
  """
  @typedoc since: "0.2.0"
  @type basement_type :: String.t()

  @typedoc """
  The quality of the structure rated from A+ to E+.

  This grade is determined by the county and is based on numerous, non-standard factors. For
  example, a mobile home would likely have a lower score than a mansion as the mobile home uses
  cheaper, lower quality materials and has less features.

  Eg. **A+**
  """
  @typedoc since: "0.2.0"
  @type quality :: String.t()

  @typedoc """
  Current condition of the structure provided by the county.

  This relates to things like whether or not there is visible wear on the structure (e.g. chipped
  paint, siding falling off). The method for determining this varies across counties.

  Eg. **FAIR**
  """
  @typedoc since: "0.2.0"
  @type condition :: String.t()

  @typedoc """
  Type of flooring used in improvements in the building.

  Eg. [**CARPET, MARBLE**](https://estated.com/developers/docs/v4/property/enum-overview#flooring_types)
  """
  @typedoc since: "0.2.0"
  @type flooring_types :: [String.t()]

  @typedoc """
  Total number of all plumbing fixtures in the building.

  Eg. **1**
  """
  @typedoc since: "0.2.0"
  @type plumbing_fixtures_count :: integer()

  @typedoc """
  The type of material used for the interior walls.

  Eg. [**PLASTER**](https://estated.com/developers/docs/v4/property/enum-overview#interior_wall_type)
  """
  @typedoc since: "0.2.0"
  @type interior_wall_type :: String.t()

  @typedoc """
  The water system for the property.

  Eg. [**MUNICIPAL**](https://estated.com/developers/docs/v4/property/enum-overview#water_type)
  """
  @typedoc since: "0.2.0"
  @type water_type :: String.t()

  @typedoc """
  The waste disposal/sewage system for the property.

  Eg. [**SEPTIC**](https://estated.com/developers/docs/v4/property/enum-overview#sewer_type)
  """
  @typedoc since: "0.2.0"
  @type sewer_type :: String.t()

  @typedoc """
  Total square footage of structure.

  Eg. **1692**
  """
  @typedoc since: "0.2.0"
  @type total_area_sq_ft :: integer()

  @typedoc """
  List of other rooms within the building.

  Eg. [**LAUNDRY ROOM, HOME OFFICE**](https://estated.com/developers/docs/v4/property/enum-overview#other_rooms)
  """
  @typedoc since: "0.2.0"
  @type other_rooms :: [String.t()]

  @typedoc """
  List of amenities included in the property.

  Eg. [**WINE CELLAR, TENNIS COURT**](https://estated.com/developers/docs/v4/property/enum-overview#amenities)
  """
  @typedoc since: "0.2.0"
  @type amenities :: [String.t()]

  # String keys in the API payload that are copied verbatim onto the struct
  # field of the same name. All of these atoms already exist (declared by
  # `defstruct` above), so `String.to_existing_atom/1` is safe and cannot
  # leak new atoms from untrusted input.
  @pass_through_keys ~w(
    year_built effective_year_built stories rooms_count beds_count baths
    partial_baths_count units_count parking_type parking_spaces_count
    pool_type architecture_type construction_type exterior_wall_type
    foundation_type roof_material_type roof_style_type heating_type
    heating_fuel_type air_conditioning_type fireplaces basement_type
    quality condition flooring_types plumbing_fixtures_count
    interior_wall_type water_type sewer_type total_area_sq_ft
    other_rooms amenities
  )

  @doc false
  @doc since: "0.2.0"
  # Casts a raw API response map (string keys) into a `t:t/0` struct.
  # Unknown keys are ignored; `nil` passes through as `nil`.
  @spec cast(map()) :: t()
  def cast(%{} = structure) do
    Enum.reduce(structure, %__MODULE__{}, &cast_field/2)
  end

  @spec cast(nil) :: nil
  def cast(nil) do
    nil
  end

  # Fields holding lists of nested records are cast through their own modules.
  defp cast_field({"other_areas", other_areas}, acc) do
    %__MODULE__{acc | other_areas: OtherArea.cast_list(other_areas)}
  end

  defp cast_field({"other_features", other_features}, acc) do
    %__MODULE__{acc | other_features: OtherFeature.cast_list(other_features)}
  end

  defp cast_field({"other_improvements", other_improvements}, acc) do
    %__MODULE__{acc | other_improvements: OtherImprovement.cast_list(other_improvements)}
  end

  # All remaining known fields are plain scalar/list values copied as-is.
  defp cast_field({key, value}, acc) when key in @pass_through_keys do
    struct!(acc, [{String.to_existing_atom(key), value}])
  end

  # Unknown keys in the payload are silently ignored (same as before).
  defp cast_field(_map_entry, acc) do
    acc
  end
end
|
lib/estated/property/structure.ex
| 0.865651
| 0.487673
|
structure.ex
|
starcoder
|
defmodule EctoSearcher.Mapping do
  @moduledoc """
  Behaviour for search query, matcher and field mappings

  ## Usage

  Either adopt `EctoSearcher.Mapping` behaviour and implement callbacks or `use EctoSearcher.Mapping`, which provides defaults.

  ```elixir
  defmodule CustomMapping do
    use EctoSearcher.Mapping

    require Ecto.Query
    alias Ecto.Query

    def matchers do
      %{
        "not_eq" => fn(field, value) -> Query.dynamic([q], ^field != ^value) end
      }
    end
  end
  ```
  """

  # A matcher that also declares how the incoming value should be aggregated
  # (e.g. cast as an array) before being handed to the query function.
  @type aggregated_matcher() :: %{query: Ecto.Query.dynamic(), aggregation: atom()}
  @type matcher() :: (atom(), any() -> Ecto.Query.dynamic() | aggregated_matcher())

  @doc """
  Should return map with search matchers

  Search matcher map should look like:

  ```elixir
  %{
    "not_eq" => fn(field, value) -> Query.dynamic([q], ^field != ^value) end,
    "in" => %{
      query: fn field, value -> Query.dynamic([q], ^field in ^value) end,
      aggregation: :array
    }
  }
  ```

  Matcher name will be matched as search field suffix.

  Values should either be a query function or a map with query function as `:query` and value aggregate type as `:aggregation`.

  Query function will be called with arguments `field` (`atom`) and `value` (casted to specific type) and should return `Ecto.Query.DynamicExpr`.
  """
  @callback matchers() :: %{String.t() => matcher()}

  # A field query that also declares the Ecto type search values should be
  # cast to before comparison.
  @type typed_field_query() :: %{query: Ecto.Query.dynamic(), type: Ecto.Type.t()}
  @type field() :: Ecto.Query.dynamic() | typed_field_query()

  @doc """
  Should return map with field queries

  Field queries map should look like:

  ```elixir
  %{
    id_alias: Query.dynamic([q], q.id),
    datetime_field_as_date: %{
      query: Query.dynamic([q], fragment("?::date", q.datetime_field)),
      type: :date
    }
  }
  ```

  Field name will be matched as search field prefix (from `searchable_fields`).

  Values should either be a `Ecto.Query.DynamicExpr` or a map with `Ecto.Query.DynamicExpr` as `:query` and value type as `:type`.

  `EctoSearcher.Searcher.search/5` and `EctoSearcher.Sorter.sort/5` looks up fields in mapping first, then looks up fields in schema.
  """
  @callback fields() :: %{atom() => field()}

  # Injects default callback implementations into the using module; both are
  # overridable so a mapping can customize either independently.
  defmacro __using__(_) do
    quote do
      @behaviour EctoSearcher.Mapping

      @doc """
      Callback implementation for `c:EctoSearcher.Mapping.matchers/0`
      """
      def matchers, do: EctoSearcher.Mapping.Default.matchers()

      @doc """
      Callback implementation for `c:EctoSearcher.Mapping.fields/0`
      """
      def fields, do: %{}

      defoverridable matchers: 0, fields: 0
    end
  end
end
|
lib/ecto_searcher/mapping.ex
| 0.888638
| 0.783947
|
mapping.ex
|
starcoder
|
defmodule Hammer.Backend.ETS do
  @moduledoc """
  An ETS backend for Hammer

  The public API of this module is used by Hammer to store information about rate-limit 'buckets'.
  A bucket is identified by a `key`, which is a tuple `{bucket_number, id}`.
  The essential schema of a bucket is: `{key, count, created_at, updated_at}`, although backends
  are free to store and retrieve this data in whichever way they wish.

  Use `start` or `start_link` to start the server:

      {:ok, pid} = Hammer.Backend.ETS.start_link(args)

  `args` is a keyword list:
  - `ets_table_name`: (atom) table name to use, defaults to `:hammer_ets_buckets`
  - `expiry_ms`: (integer) time in ms before a bucket is auto-deleted,
    should be larger than the expected largest size/duration of a bucket
  - `cleanup_interval_ms`: (integer) time between cleanup runs,

  Example:

      Hammer.Backend.ETS.start_link(
        expiry_ms: 1000 * 60 * 60,
        cleanup_interval_ms: 1000 * 60 * 10
      )
  """

  @behaviour Hammer.Backend

  use GenServer

  alias Hammer.Utils

  ## Public API

  # Starts the backend (not linked) with default options.
  def start do
    start([])
  end

  def start(args) do
    GenServer.start(__MODULE__, args, name: __MODULE__)
  end

  # Starts the backend linked to the caller with default options.
  def start_link do
    start_link([])
  end

  @doc """
  Start the backend process, linked to the calling process.

  See the module documentation for the supported `args`.
  """
  def start_link(args) do
    GenServer.start_link(__MODULE__, args, name: __MODULE__)
  end

  # Stops the backend process.
  def stop do
    GenServer.call(__MODULE__, :stop)
  end

  @doc """
  Record a hit in the bucket identified by `key`
  """
  @spec count_hit(key :: {bucket :: integer, id :: String.t()}, now :: integer) ::
          {:ok, count :: integer}
          | {:error, reason :: any}
  def count_hit(key, now) do
    GenServer.call(__MODULE__, {:count_hit, key, now})
  end

  @doc """
  Retrieve information about the bucket identified by `key`
  """
  @spec get_bucket(key :: {bucket :: integer, id :: String.t()}) ::
          {:ok,
           {key :: {bucket :: integer, id :: String.t()}, count :: integer, created :: integer,
            updated :: integer}}
          | {:ok, nil}
          | {:error, reason :: any}
  def get_bucket(key) do
    GenServer.call(__MODULE__, {:get_bucket, key})
  end

  @doc """
  Delete all buckets associated with `id`.
  """
  @spec delete_buckets(id :: String.t()) ::
          {:ok, count_deleted :: integer}
          | {:error, reason :: any}
  def delete_buckets(id) do
    GenServer.call(__MODULE__, {:delete_buckets, id})
  end

  ## GenServer Callbacks

  # NOTE(review): :cleanup_interval_ms has no default here; if it is omitted
  # from `args`, `:timer.send_interval/2` will fail — confirm callers always
  # supply it (the moduledoc implies it is expected).
  def init(args) do
    ets_table_name = Keyword.get(args, :ets_table_name, :hammer_ets_buckets)
    cleanup_interval_ms = Keyword.get(args, :cleanup_interval_ms)
    expiry_ms = Keyword.get(args, :expiry_ms)

    :ets.new(ets_table_name, [:named_table, :ordered_set])
    # Periodically prune expired buckets (see handle_info(:prune, ...)).
    :timer.send_interval(cleanup_interval_ms, :prune)

    state = %{
      ets_table_name: ets_table_name,
      cleanup_interval_ms: cleanup_interval_ms,
      expiry_ms: expiry_ms
    }

    {:ok, state}
  end

  def handle_call(:stop, _from, state) do
    {:stop, :normal, :ok, state}
  end

  def handle_call({:count_hit, key, now}, _from, state) do
    %{ets_table_name: tn} = state

    try do
      if :ets.member(tn, key) do
        # One update_counter result per op:
        #   {2, 1}          -> increment count (element 2) by 1
        #   {3, 0}          -> add 0 to created_at, i.e. just read it back
        #   {4, 1, 0, now}  -> threshold form; effectively overwrites
        #                      updated_at (element 4) with `now`
        [count, _, _] = :ets.update_counter(tn, key, [{2, 1}, {3, 0}, {4, 1, 0, now}])
        {:reply, {:ok, count}, state}
      else
        # First hit for this bucket: count 1, created_at = updated_at = now.
        true = :ets.insert(tn, {key, 1, now, now})
        {:reply, {:ok, 1}, state}
      end
    rescue
      e ->
        {:reply, {:error, e}, state}
    end
  end

  def handle_call({:get_bucket, key}, _from, state) do
    %{ets_table_name: tn} = state

    try do
      result =
        case :ets.lookup(tn, key) do
          [] ->
            {:ok, nil}

          [bucket] ->
            {:ok, bucket}
        end

      {:reply, result, state}
    rescue
      e ->
        {:reply, {:error, e}, state}
    end
  end

  def handle_call({:delete_buckets, id}, _from, state) do
    %{ets_table_name: tn} = state
    # Compiled from:
    # fun do {{bucket_number, bid},_,_,_} when bid == ^id -> true end
    try do
      count_deleted =
        :ets.select_delete(tn, [{{{:"$1", :"$2"}, :_, :_, :_}, [{:==, :"$2", id}], [true]}])

      {:reply, {:ok, count_deleted}, state}
    rescue
      e ->
        {:reply, {:error, e}, state}
    end
  end

  # Deletes every bucket whose updated_at (element 4) is older than the
  # configured expiry window.
  def handle_info(:prune, state) do
    %{expiry_ms: expiry_ms, ets_table_name: tn} = state
    now = Utils.timestamp()
    expire_before = now - expiry_ms

    :ets.select_delete(tn, [
      {{:_, :_, :_, :"$1"}, [{:<, :"$1", expire_before}], [true]}
    ])

    {:noreply, state}
  end
end
|
lib/hammer/backend/ets.ex
| 0.832066
| 0.5144
|
ets.ex
|
starcoder
|
defmodule Astro.Earth do
  @moduledoc """
  Constants and astronomical calculations
  related to the earth.
  """

  alias Astro.Time

  import Astro.Math, only: [to_radians: 1, to_degrees: 1, poly: 2, deg: 1, sin: 1]

  @geometric_solar_elevation 90.0
  @refraction 34.0 / 60.0
  @solar_radius 16.0 / 60.0
  @meters_per_kilometer 1000.0
  @obliquity to_radians(23.4397)
  @earth_radius 6_371.0072 # was 6356.9

  @doc """
  Returns an estimate of the effect of refraction
  applied to the calculation of sunrise and
  sunset times.

  Sunrise actually occurs before the sun truly
  reaches the horizon because earth's atmosphere
  refracts the Sun's image. At the horizon, the average
  amount of refraction is 34 arcminutes, though this
  amount varies based on atmospheric conditions.

  This effect is especially powerful for objects
  that appear close to the horizon, such as the
  rising or setting sun, because the light rays
  enter the earth's atmosphere at a particularly
  shallow angle. Because of refraction, the sun
  may be seen for several minutes before it actually
  rises in the morning and after it sets in the
  evening.
  """
  def refraction do
    @refraction
  end

  @doc """
  Returns the suns apparent radius at sunrise/sunset.

  Unlike most other solar measurements, sunrise occurs
  when the Sun's upper limb, rather than its center,
  appears to cross the horizon. The apparent radius of
  the Sun at the horizon is 16 arcminutes.
  """
  def solar_radius do
    @solar_radius
  end

  @doc """
  Returns the radius of the earth in kilometers
  """
  def earth_radius do
    @earth_radius
  end

  @doc """
  Returns the obliquity of the earth
  """
  def obliquity do
    @obliquity
  end

  @doc """
  Returns an approximation of the earth's nutation (in degrees)
  for the given number of Julian centuries.
  """
  @spec nutation(Time.julian_centuries()) :: Astro.angle()
  def nutation(julian_centuries) do
    # BUG FIX: the original passed `°/1` as the mapper, which is not valid
    # Elixir syntax. The intended capture is `&deg/1` (imported from
    # Astro.Math above) to convert each polynomial coefficient to an angle.
    a = poly(julian_centuries, Enum.map([124.90, -1934.134, 0.002063], &deg/1))
    b = poly(julian_centuries, Enum.map([201.11, 72001.5377, 0.00057], &deg/1))
    deg(-0.004778) * sin(a) + deg(-0.0003667) * sin(b)
  end

  @doc """
  Adjusts the solar elevation to account
  for the elevation of the requested location

  ## Arguments

  * `elevation` is elevation in meters

  ## Returns

  * The solar elevation angle adjusted for the elevation
  """
  def elevation_adjustment(elevation) do
    # Dip of the horizon for an observer `elevation` meters above sea level.
    :math.acos(earth_radius() / (earth_radius() + elevation / @meters_per_kilometer))
    |> to_degrees
  end

  @doc """
  Adjusts the solar elevation to be the apparent angle
  at sunrise if the requested angle is `:geometric`
  (or 90°)

  ## Arguments

  * `solar_elevation` is the requested solar elevation
    in degress. It will be 90° for sunrise and sunset.

  * `elevation` is elevation in meters

  ## Returns

  * The solar elevation angle adjusted for refraction,
    elevation and solar radius.
  """
  def adjusted_solar_elevation(@geometric_solar_elevation = solar_elevation, elevation) do
    solar_elevation + solar_radius() + refraction() + elevation_adjustment(elevation)
  end

  # NOTE(review): any non-geometric requested elevation collapses to the
  # 90° constant here, discarding the caller's value — confirm this is the
  # intended behavior rather than returning `solar_elevation` unchanged.
  def adjusted_solar_elevation(_solar_elevation, _elevation) do
    @geometric_solar_elevation
  end
end
|
lib/astro/earth.ex
| 0.933764
| 0.902867
|
earth.ex
|
starcoder
|
defmodule Adventofcode.Day13PacketScanners do
  @moduledoc """
  Advent of Code 2017, day 13: a packet crosses a firewall of layered
  scanners, one layer (depth) per picosecond. A scanner of `range` r sweeps
  back and forth over positions 0..r-1; the packet is caught when it enters
  a depth whose scanner is at position 0.
  """

  # NOTE(review): because of this alias, the nested `defmodule Scanner` below
  # resolves to the top-level module `Elixir.Scanner` rather than
  # `#{inspect(__MODULE__)}.Scanner` — confirm that is intended.
  alias Scanner

  @enforce_keys [:scanners, :max_depth]
  # depth: packet position (-1 = not yet entered); caught_at_depth: depths
  # where the packet was caught; delay: picoseconds to wait before starting.
  defstruct scanners: nil, max_depth: nil, depth: -1, caught_at_depth: [], delay: 0

  # Part 1: total severity (sum of depth * range over depths where caught)
  # of a trip that starts immediately.
  def severity(input) do
    input
    |> parse()
    |> new()
    |> tick_repeatedly()
    |> do_severity()
  end

  # Part 2: smallest delay for which the packet crosses without being caught.
  def minimum_delay(input) do
    input
    |> parse()
    |> new()
    |> do_minimum_delay()
  end

  defp do_severity(%{caught_at_depth: caught_at_depth, scanners: scanners}) do
    scanners
    |> Enum.filter(&(&1.depth in caught_at_depth))
    |> Enum.map(&(&1.depth * &1.range))
    |> Enum.sum()
  end

  # Tries delays 0, 1, 2, ... and returns the first with no catch; a run is
  # aborted at the first catch, so caught_at_depth has at most one element.
  defp do_minimum_delay(state) do
    Enum.find_value(Stream.iterate(0, &(&1 + 1)), fn delay ->
      case %{state | delay: delay} |> tick_until_caught_or_done() do
        %{caught_at_depth: []} -> delay
        %{caught_at_depth: [_depth]} -> nil
      end
    end)
  end

  # Done once the packet has reached the deepest layer.
  defp tick_repeatedly(%{depth: max_depth, max_depth: max_depth} = state), do: state

  defp tick_repeatedly(state) do
    state
    |> tick()
    |> tick_repeatedly()
  end

  # Stop early on the first catch (used by part 2).
  defp tick_until_caught_or_done(%{caught_at_depth: [_ | _]} = state), do: state

  defp tick_until_caught_or_done(%{depth: max_depth, max_depth: max_depth} = state) do
    state
  end

  defp tick_until_caught_or_done(state) do
    state
    |> tick()
    |> tick_until_caught_or_done()
  end

  # One picosecond: the packet moves first, then every scanner advances.
  defp tick(%{delay: 0} = state) do
    state
    |> tick_player()
    |> tick_scanners()
  end

  # A pending delay is consumed in one step by fast-forwarding all scanners
  # `delay` picoseconds instead of simulating each one.
  defp tick(%{delay: delay} = state) do
    %{state | delay: 0}
    |> fastforward_scanners(delay)
    |> tick()
  end

  defmodule Scanner do
    @enforce_keys [:depth, :range]
    # position sweeps 0..range-1; direction is +1 (down) or -1 (up).
    defstruct depth: nil, range: nil, position: 0, direction: 1
  end

  # Moves the packet one layer deeper and records a catch if that layer's
  # scanner is currently at position 0.
  defp tick_player(%{depth: depth, scanners: scanners} = state) do
    depth = depth + 1

    caught_at_depth =
      case Enum.find(scanners, &(&1.depth == depth and &1.position == 0)) do
        nil -> state.caught_at_depth
        _scanner -> state.caught_at_depth ++ [depth]
      end

    %{state | depth: depth, caught_at_depth: caught_at_depth}
  end

  defp tick_scanners(%{scanners: scanners} = state) do
    %{state | scanners: Enum.map(scanners, &tick_scanner/1)}
  end

  # Advances one scanner a single step, bouncing at both ends of its range.
  defp tick_scanner(%{position: pos, direction: dir, range: range} = scanner) do
    direction =
      cond do
        pos + dir < 0 -> 1
        pos + dir >= range -> -1
        true -> dir
      end

    %{scanner | position: pos + direction, direction: direction}
  end

  defp fastforward_scanners(%{scanners: scanners} = state, delay) do
    %{state | scanners: Enum.map(scanners, &fastforward_scanner(&1, delay))}
  end

  # Jumps a scanner `delay` steps ahead in O(1): its motion is periodic with
  # period 2 * (range - 1), so the new position/direction follow from the
  # remainder within one full back-and-forth cycle.
  defp fastforward_scanner(scanner, delay) do
    distance = scanner.range - 1

    case rem(scanner.position + delay, distance * 2) do
      pos when pos > distance ->
        # Second half of the cycle: moving back up toward position 0.
        %{scanner | direction: -1, position: trunc(abs(distance * 2 - pos))}

      pos ->
        %{scanner | direction: 1, position: pos}
    end
  end

  defp new(scanners) do
    max_depth = scanners |> Enum.map(& &1.depth) |> Enum.max()
    %__MODULE__{scanners: scanners, max_depth: max_depth}
  end

  # Parses lines of the form "depth: range" into Scanner structs.
  defp parse(input) do
    input
    |> String.trim_trailing()
    |> String.split("\n")
    |> Enum.map(&parse_line/1)
    |> Enum.map(fn [depth, range] -> %Scanner{depth: depth, range: range} end)
  end

  defp parse_line(line) do
    line
    |> String.split(": ")
    |> Enum.map(&String.to_integer/1)
  end
end
|
lib/day_13_packet_scanners.ex
| 0.648132
| 0.634147
|
day_13_packet_scanners.ex
|
starcoder
|
% In Elixir, strings are represented as binaries. A binary is a bit
% string where its size is a multiple of eight. A BitString created
% conforming to this condition will be automatically mapped as String:
%
% <<72, 73, 74>>.__parent__ % => String
%
% ## `to_char_list`, `to_bin`, `to_s` and `to_str`
%
% There are four methods responsible for conversion from and to strings.
%
% Since Erlang represents strings as lists, `to_bin` and `to_char_list`
% has the sole purpose of converting from Erlang strings (char lists)
% to Elixir strings (binaries) and vice-versa. In general, those methods
% are only invoked when handling data from/to Erlang. Such methods should
% not be implemented in your own structures.
%
% On the other hand, `to_s` should return a string representation of
% a data structure while implementing `to_str` means that a structure
% could be used in any place a string would normally be used.
%
% Notice that to_i and to_int follows exactly the same convention.
module String
module Behavior
% Returns a new string built by repeating the original string the
% given *number* of times.
%
% ## Examples
%
% "foo" * 3 % => "foofoofoo"
%
def *(0)
""
end
def *(number)
Erlang.binary.copy(self, number)
end
% Retrieves the byte at the given position. A negative position
% counts from the end of the string.
%
% ## Examples
%
% "elixir"[3] % => 120
% "elixir"[-3] % => 120
%
def [](number)
if number < 0
Erlang.binary.at(self, Erlang.size(self) + number)
else
Erlang.binary.at(self, number)
end
end
% Slice the string in the given *start* and *length* arguments. If length
% is less than zero, it is the negative index to the end of the string.
%
% ## Examples
%
% "[1,2,3]"[0,3] % => "[1,"
% "[1,2,3]"[1,-2] % => "1,2,3"
%
def [](start, length)
if length < 0
Erlang.binary_part(self, start, Erlang.size(self) - start + length + 1)
else
Erlang.binary_part(self, start, length)
end
end
% Concatenate two strings.
%
% ## Examples
%
% "eli" + "xir" % => "elixir"
%
def +(another)
<<self|binary, another|binary>>
end
% Returns the length of the string in bytes. All strings parsed by the
% interpreter are handled as utf-8. Any I/O driver should be
% responsible to convert to utf-8.
%
% ## Examples
%
% "elixir".length % => 6
% "josé".length % => 4
%
def length
Erlang.size(self)
end
alias_local 'length, 'size, 0
% Check if the current string includes the given string.
%
% ## Examples
%
% true = "elixir".include?("el")
% false = "elixir".include?("ex")
%
def include?(string)
cl1 = to_char_list
cl2 = string.to_char_list
include?(cl1, cl2, cl1.length, cl2.length)
end
% Returns the index of the first occurrence of the given substring or matching regex.
% Returns nil if nothing is found.
%
% ## Examples
%
% 1 = "hello".index('e')
% 3 = "hello".index('lo')
% nil = "hello".index('a')
%
def index(given)
if given.__module_name__ == 'Regexp::Behavior
case given.indexes(self)
match [{x,_}|_]
x
match nil
nil
end
else
result = Erlang.string.str(to_char_list, given.to_char_list)
case result
match 0 then nil
match _ then result - 1
end
end
end
% Returns how many of the string's characters occur in the given
% char list.
%
% ## Examples
%
% 1 = "hello".count('e')
% 3 = "hello".count('lo')
% 0 = "hello".count('a')
%
def count(given)
count given.to_char_list, to_char_list, 0
end
% Substitute the first occurrence of *given* in the string by *replacement*.
% Please check `Regexp#replace` for more information about the characters
% allowed in *replacement*.
%
% ## Examples
%
% "abc" = "abc".sub(~r(d), "d")
% "adc" = "abc".sub(~r(b), "d")
% "a[b]c" = "abc".sub(~r(b), "[&]")
% "a[&]c" = "abc".sub(~r(b), "[\\&]")
% "a[b]c" = "abc".sub(~r[(b)], "[\\1]")
%
def sub(given, replacement)
if given.__module_name__ == 'Regexp::Behavior
given.replace(self, replacement)
else
Regexp.new(Regexp.escape(given)).replace(self, replacement)
end
end
% Substitute **all** occurrences of *given* in the string by *replacement*.
% Please check `Regexp#replace` for more information about the characters
% allowed in *replacement*.
%
% ## Examples
%
% "abcbe" = "abcbe".gsub(~r(d), "d")
% "adcde" = "abcbe".gsub(~r(b), "d")
% "a[b]c[b]e" = "abcbe".gsub(~r(b), "[&]")
% "a[&]c[&]e" = "abcbe".gsub(~r(b), "[\\&]")
% "a[b]c[b]e" = "abcbe".gsub(~r[(b)], "[\\1]")
def gsub(given, replacement)
if given.__module_name__ == 'Regexp::Behavior
given.replace_all(self, replacement)
else
Regexp.new(Regexp.escape(given)).replace_all(self, replacement)
end
end
% Remove all space characters from the beginning and end of the string.
def strip
gsub(~r"\A\s*|\s*\z", "")
end
% Remove all space characters from the beginning of the string.
def lstrip
gsub(~r"\A\s*", "")
end
% Remove all space characters from the end of the string.
def rstrip
gsub(~r"\s*\z", "")
end
% Returns a quoted (inspectable) representation of this string.
%
% ## Examples
%
% "elixir".inspect % => "\"elixir\""
%
def inspect
list = Erlang.binary_to_list(self)
if Erlang.io_lib.printable_unicode_list(list)
<<$\", escape(list, [])|binary, $\">>
else
Erlang.io_lib.format($"~w", [self]).to_bin
end
end
% Receives a regular expression and splits the string. An optional number
% of parts to split the string into can be given; it defaults to the
% atom 'infinity (split into as many parts as possible).
%
% ## Examples
%
% ["foo", "baz", "bat"] = "foobarbazbarbat".split(~r"bar")
% ["foo", "bazbarbat"] = "foobarbazbarbat".split(~r"bar", 2)
%
def split(given, parts := 'infinity)
if given.__module_name__ == 'Regexp::Behavior
given.split(self, parts)
else
Regexp.new(Regexp.escape(given)).split(self, parts)
end
end
% Scan the whole string returning all matches.
%
% ## Examples
%
% "key1=value1; key2=value2".scan(~r"(?:(\w+)=(\w+);?)")
%
def scan(given, offset := 0)
if given.__module_name__ == 'Regexp::Behavior
given.scan(self, offset)
else
Regexp.new(Regexp.escape(given)).scan(self, offset)
end
end
% Return a string with the last character removed. If the string ends
% with \r\n then both characters are removed.
%
% ## Examples
%
% "foo".chop % => "fo"
% "foo\r\n".chop % => "foo"
% "foo\n\r".chop % => "foo\n"
% "x".chop.chop % => ""
%
def chop
sub(~r"(\r\n|.)\z", "")
end
% Returns true if the string is empty.
def empty?
Erlang.size(self) == 0
end
% Returns the string itself.
def to_bin
self
end
% Returns the string itself.
def to_s
self
end
% Returns the string itself.
def to_str
self
end
% Returns the list of chars representation of this String.
def to_char_list
Erlang.binary_to_list(self)
end
% Converts this string to a utf-8 encoded atom.
def to_atom
Erlang.binary_to_atom(self, 'utf8)
end
% Returns a copy of the original string with all lowercase letters replaced with their uppercase counterparts.
def upcase
Erlang.string.to_upper(to_char_list).to_bin
end
% Returns a copy of the original string with all uppercase letters replaced with their lowercase counterparts.
def downcase
Erlang.string.to_lower(to_char_list).to_bin
end
% Returns a copy of the original string with the first character converted to uppercase and the rest to lowercase.
def capitalize
[h|t] = Erlang.string.to_lower(to_char_list)
[Erlang.string.to_upper(h)|t].to_bin
end
% Returns a new string with the characters from original string in reverse order.
def reverse
to_char_list.reverse.to_bin
end
private
% Escapes quotes and control characters while building the char list used
% by `inspect`. The buffer accumulates in reverse and is flattened once.
def escape([h|t], buffer)
char = case h
match $#, $\"
[$\\,h]
match $\b
[$\\,$b]
match $\d
[$\\,$d]
match $\e
[$\\,$e]
match $\f
[$\\,$f]
match $\n
[$\\,$n]
match $\r
[$\\,$r]
match $\t
[$\\,$t]
match $\v
[$\\,$v]
else
h
end
escape(t, [char|buffer])
end
def escape([], buffer)
Erlang.iolist_to_binary(Erlang.lists.reverse(buffer))
end
% Walks the string's chars, incrementing the counter for each one that
% is contained in *items* (see count_each below).
def count(items, [h|t], counter)
count items, t, count_each(h, items, counter)
end
def count(_items, [], counter)
counter
end
% Adds 1 to the counter when *item* occurs in the given char list.
def count_each(item, [item|_], counter)
counter + 1
end
def count_each(item, [_|t], counter)
count_each item, t, counter
end
def count_each(_item, [], counter)
counter
end
% Char-list based substring search backing the public include?/1:
% slides over *original* testing whether *compare* is a prefix.
def include?([], _, _, _)
false
end
def include?(original, compare, l1, l2)
if prefix(compare, original, l2, l1)
true
else
[_|t] = original
include? t, compare, l1 - 1, l2
end
end
% Length-checked entry point: a prefix longer than the remaining
% string can never match.
def prefix(pre, string, l1, l2)
if l2 < l1
false
else
prefix(pre, string)
end
end
def prefix([h|pre], [h|string])
prefix(pre, string)
end
def prefix([], _)
true
end
def prefix(_, _)
false
end
end
end
|
lib/string.ex
| 0.715821
| 0.728658
|
string.ex
|
starcoder
|
defmodule AWS.Fis do
  @moduledoc """
  Fault Injection Simulator is a managed service that enables you to perform fault
  injection experiments on your Amazon Web Services workloads.

  For more information, see the [Fault Injection Simulator User Guide](https://docs.aws.amazon.com/fis/latest/userguide/).
  """

  alias AWS.Client
  alias AWS.Request

  @doc """
  Returns the request metadata (endpoint, protocol and signing details)
  shared by every operation in this module.
  """
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2020-12-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "fis",
      global?: false,
      protocol: "rest-json",
      service_id: "fis",
      signature_version: "v4",
      signing_name: "fis",
      target_prefix: nil
    }
  end

  @doc """
  Creates an experiment template.

  An experiment template includes the following components:

    * **Targets**: A target can be a specific resource in your Amazon
    Web Services environment, or one or more resources that match criteria that you
    specify, for example, resources that have specific tags.

    * **Actions**: The actions to carry out on the target. You can
    specify multiple actions, the duration of each action, and when to start each
    action during an experiment.

    * **Stop conditions**: If a stop condition is triggered while an
    experiment is running, the experiment is automatically stopped. You can define a
    stop condition as a CloudWatch alarm.

  For more information, see [Experiment templates](https://docs.aws.amazon.com/fis/latest/userguide/experiment-templates.html)
  in the *Fault Injection Simulator User Guide*.
  """
  def create_experiment_template(%Client{} = client, input, options \\ []) do
    url_path = "/experimentTemplates"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :post,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Deletes the specified experiment template.
  """
  def delete_experiment_template(%Client{} = client, id, input, options \\ []) do
    url_path = "/experimentTemplates/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :delete,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Gets information about the specified FIS action.
  """
  def get_action(%Client{} = client, id, options \\ []) do
    url_path = "/actions/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Gets information about the specified experiment.
  """
  def get_experiment(%Client{} = client, id, options \\ []) do
    url_path = "/experiments/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Gets information about the specified experiment template.
  """
  def get_experiment_template(%Client{} = client, id, options \\ []) do
    url_path = "/experimentTemplates/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Gets information about the specified resource type.
  """
  def get_target_resource_type(%Client{} = client, resource_type, options \\ []) do
    url_path = "/targetResourceTypes/#{AWS.Util.encode_uri(resource_type)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Lists the available FIS actions.
  """
  def list_actions(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
    url_path = "/actions"
    headers = []
    query_params = pagination_query_params(max_results, next_token)

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Lists your experiment templates.
  """
  def list_experiment_templates(
        %Client{} = client,
        max_results \\ nil,
        next_token \\ nil,
        options \\ []
      ) do
    url_path = "/experimentTemplates"
    headers = []
    query_params = pagination_query_params(max_results, next_token)

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Lists your experiments.
  """
  def list_experiments(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
    url_path = "/experiments"
    headers = []
    query_params = pagination_query_params(max_results, next_token)

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Lists the tags for the specified resource.
  """
  def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
    url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Lists the target resource types.
  """
  def list_target_resource_types(
        %Client{} = client,
        max_results \\ nil,
        next_token \\ nil,
        options \\ []
      ) do
    url_path = "/targetResourceTypes"
    headers = []
    query_params = pagination_query_params(max_results, next_token)

    Request.request_rest(
      client,
      metadata(),
      :get,
      url_path,
      query_params,
      headers,
      nil,
      options,
      200
    )
  end

  @doc """
  Starts running an experiment from the specified experiment template.
  """
  def start_experiment(%Client{} = client, input, options \\ []) do
    url_path = "/experiments"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :post,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Stops the specified experiment.
  """
  def stop_experiment(%Client{} = client, id, input, options \\ []) do
    url_path = "/experiments/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :delete,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Applies the specified tags to the specified resource.
  """
  def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
    url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :post,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Removes the specified tags from the specified resource.
  """
  def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
    url_path = "/tags/#{AWS.Util.encode_uri(resource_arn)}"
    headers = []

    # tagKeys is sent as a query parameter rather than in the body.
    {query_params, input} =
      [
        {"tagKeys", "tagKeys"}
      ]
      |> Request.build_params(input)

    Request.request_rest(
      client,
      metadata(),
      :delete,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  @doc """
  Updates the specified experiment template.
  """
  def update_experiment_template(%Client{} = client, id, input, options \\ []) do
    url_path = "/experimentTemplates/#{AWS.Util.encode_uri(id)}"
    headers = []
    query_params = []

    Request.request_rest(
      client,
      metadata(),
      :patch,
      url_path,
      query_params,
      headers,
      input,
      options,
      200
    )
  end

  # Builds the query-string parameter list shared by the paginated `list_*`
  # operations, omitting any parameter whose value is nil. The resulting
  # order matches the hand-rolled originals: "maxResults" first, then
  # "nextToken".
  defp pagination_query_params(max_results, next_token) do
    []
    |> put_query_param("nextToken", next_token)
    |> put_query_param("maxResults", max_results)
  end

  # Prepends {key, value} to the parameter list unless value is nil.
  defp put_query_param(params, _key, nil), do: params
  defp put_query_param(params, key, value), do: [{key, value} | params]
end
|
lib/aws/generated/fis.ex
| 0.757615
| 0.438364
|
fis.ex
|
starcoder
|
defmodule OliWeb.Qa.Utils do
use Phoenix.HTML
# Maps a QA warning type to a {material icon name, hex color} pair and
# renders it as raw HTML. Unknown types fall back to a generic warning icon.
def warning_icon(type) do
case type do
"accessibility" -> {"not_accessible", "#ffd868"}
"content" -> {"image_not_supported", "#ffa351ff"}
"pedagogy" -> {"batch_prediction", "#ffbe7bff"}
_ -> {"warning", "#eed971ff"}
end
|> icon()
|> raw()
end
# Builds the HTML markup for a Material Design outlined icon with the
# given inline color.
def icon({name, color}) do
~s|<i style="color: #{color}" class="material-icons-outlined icon">#{name}</i>|
end
# Returns the CSS class suffix " active" when `warning` is the currently
# selected warning, otherwise an empty string.
def warning_selected?(selected, warning) do
case selected == warning do
true -> " active"
false -> ""
end
end
# Returns an HTML explanation (wrapped via Phoenix `raw/1`) for the given
# QA warning subtype. `context` may carry a :graded flag, used only by the
# "no attached activities" subtype; unknown subtypes yield an empty string.
def explanatory_text(subtype, context \\ %{}) do
case subtype do
"missing alt text" ->
"""
<p>
Providing alternative text for non-text content such as images and videos enables users
with visual impairments to understand the reason and context for the provided content.
</p>
<p>
For more information on the importance of providing contextual alternative text to non-text content, see the
<a href="https://webaim.org/techniques/alttext/#basics" target="_blank">alt text accessibility guide</a> on WebAIM.org.
</p>
"""
"nondescriptive link text" ->
"""
<p>
Links are more useful to users when they are provided with descriptive context instead of a raw URL or
generic text such as "click here" or "learn more."
</p>
<p>
For more information on the importance of providing textual context for links, see the
<a href="https://webaim.org/techniques/hypertext/link_text#text" target="_blank">Link text accessibility guide</a> on WebAIM.org.
</p>
"""
"broken remote resource" ->
"""
<p>
A link or an image hosted on another website was not able to be found. This might be a temporary problem, or it could
mean the link or image path is broken and needs to be updated.
</p>
"""
"no attached objectives" ->
"""
<p>
One of the Open Learning Initiative's core features is providing analytics on course content to identify
areas in the course that can be improved. This only works when pages and activities have objectives attached to them.
</p>
<p>
You can publish a course without linking objectives to course content, but no analytics will be generated for this content.
</p>
<p>
For more information on the importance of attaching learning objectives to pages and activities, see the
<a href="https://www.cmu.edu/teaching/designteach/design/learningobjectives.html" target="_blank">guide on learning objectives</a> from the CMU Eberly Center.
</p>
"""
"no attached activities" ->
# Graded pages require at least one activity; ungraded pages only get a
# softer recommendation.
case context[:graded] do
true ->
"""
<p>
This graded page does not have any attached activities. In order to provide a grade, the page must have at least one activity.
</p>
"""
_ ->
"""
<p>
This page does not provide any practice opportunities in the form of activities for the material students may have learned on the page.
That's fine for introductory or conclusory pages, but pages with learning content should generally provide practice opportunities.
</p>
<p>
For more information on the importance of providing practice opportunities in pages, see the
<a href="https://www.cmu.edu/teaching/designteach/design/assessments.html" target="_blank">guide on assessments</a> from the CMU Eberly Center.
</p>
"""
end
_ ->
""
end
|> raw()
end
# Returns a short HTML action recommendation (wrapped via Phoenix `raw/1`)
# for the given QA warning subtype. Mirrors the subtypes handled by
# explanatory_text/2, with a generic fallback message.
def action_item(subtype, context \\ %{}) do
case subtype do
"missing alt text" ->
"""
<p>Add alternative text to this content</p>
"""
"nondescriptive link text" ->
"""
<p>Provide more descriptive text for this link</p>
"""
"broken remote resource" ->
"""
<p>Check to make sure this link or image is not broken</p>
"""
"no attached objectives" ->
"""
<p>Attach a learning objective to this page or activity</p>
"""
"no attached activities" ->
case context[:graded] do
true ->
"""
<p>Add an activity to this page or change it to an ungraded page</p>
"""
_ ->
"""
<p>Consider adding an activity to this page if it provides learning content</p>
"""
end
_ ->
"""
<p>This content has an issue</p>
"""
end
|> raw()
end
end
|
lib/oli_web/live/qa/utils.ex
| 0.63023
| 0.427367
|
utils.ex
|
starcoder
|
defmodule Absinthe.Traversal do
# Graph traversal utilities for dealing with ASTs and schemas using the
# `Absinthe.Traversal.Node` protocol.
# Note this really only exists to handle some Schema rules stuff and is generally
# considered legacy code. See `Absinthe.Blueprint.Transform` for information
# on how to walk blueprint trees.
@moduledoc false
alias __MODULE__
alias Absinthe.Traversal.Node
# context: caller-supplied data threaded through the traversal
# seen: nodes already visited (guards against revisiting / cycles)
# path: reserved for node-path tracking (not populated in this module)
@type t :: %__MODULE__{context: any, seen: [Node.t()], path: [Node.t()]}
defstruct context: nil, seen: [], path: []
# Instructions defining behavior during traversal
# * `{:ok, value, traversal}`: The value of the node is `value`, and traversal
# should continue to children (using `traversal`)
# * `{:prune, value, traversal}`: The value of the node is `value` and
# traversal should NOT continue to children, but to siblings (using
# `traversal`)
# * `{:error, message}`: Bad stuff happened, explained by `message`
@type instruction_t :: {:ok, any, t} | {:prune, any, t} | {:error, any}
# Traverse, reducing nodes using a given function to evaluate their value.
@doc false
@spec reduce(Node.t(), any, acc, (Node.t(), t, acc -> instruction_t)) :: acc when acc: var
def reduce(node, context, initial_value, node_evaluator) do
{result, _traversal} =
do_reduce(node, %Traversal{context: context}, initial_value, node_evaluator)
result
end
# Reduce using a traversal struct. Nodes already in `seen` are skipped,
# returning the accumulator unchanged.
# NOTE(review): the `{:error, message}` instruction documented in
# instruction_t is not matched by the case below; an evaluator returning
# it would raise a CaseClauseError — confirm whether that is intended.
@spec do_reduce(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t} when acc: var
defp do_reduce(node, traversal, initial_value, node_evaluator) do
if seen?(traversal, node) do
{initial_value, traversal}
else
case node_evaluator.(node, traversal, initial_value) do
{:ok, value, next_traversal} ->
reduce_children(node, next_traversal |> put_seen(node), value, node_evaluator)
{:prune, value, next_traversal} ->
{value, next_traversal |> put_seen(node)}
end
end
end
# Traverse a node's children, threading both the accumulator and the
# traversal (with its growing `seen` set) left to right.
@spec reduce_children(Node.t(), t, acc, (Node.t(), t, acc -> instruction_t)) :: {acc, t}
when acc: var
defp reduce_children(node, traversal, initial, node_evalator) do
Enum.reduce(Node.children(node, traversal), {initial, traversal}, fn child,
{this_value,
this_traversal} ->
do_reduce(child, this_traversal, this_value, node_evalator)
end)
end
# Membership test against the `seen` list (linear scan).
@spec seen?(t, Node.t()) :: boolean
defp seen?(traversal, node), do: traversal.seen |> Enum.member?(node)
# Records a node as visited.
@spec put_seen(t, Node.t()) :: t
defp put_seen(traversal, node) do
%{traversal | seen: [node | traversal.seen]}
end
end
|
lib/absinthe/traversal.ex
| 0.878621
| 0.613425
|
traversal.ex
|
starcoder
|
defmodule Braintree.Subscription do
@moduledoc """
Manage customer subscriptions to recurring billing plans.
For additional reference see:
https://developers.braintreepayments.com/reference/request/subscription/create/ruby
"""
use Braintree.Construction
alias Braintree.{HTTP, Transaction, AddOn, Search}
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
id: String.t(),
plan_id: String.t(),
balance: String.t(),
billing_day_of_month: String.t(),
billing_period_end_date: String.t(),
billing_period_start_date: String.t(),
created_at: String.t(),
current_billing_cycle: String.t(),
days_past_due: String.t(),
descriptor: String.t(),
failure_count: String.t(),
first_billing_date: String.t(),
merchant_account_id: String.t(),
never_expires: String.t(),
next_bill_amount: String.t(),
next_billing_date: String.t(),
next_billing_period_amount: String.t(),
number_of_billing_cycles: String.t(),
paid_through_date: String.t(),
payment_method_token: String.t(),
price: String.t(),
status: String.t(),
trial_duration: String.t(),
trial_duration_unit: String.t(),
trial_period: String.t(),
updated_at: String.t(),
add_ons: [AddOn.t()],
discounts: [any],
transactions: [Transaction.t()],
status_history: [any]
}
defstruct id: nil,
plan_id: nil,
balance: nil,
billing_day_of_month: nil,
billing_period_end_date: nil,
billing_period_start_date: nil,
created_at: nil,
current_billing_cycle: nil,
days_past_due: nil,
descriptor: nil,
failure_count: nil,
first_billing_date: nil,
merchant_account_id: nil,
never_expires: nil,
next_bill_amount: nil,
next_billing_date: nil,
next_billing_period_amount: nil,
number_of_billing_cycles: nil,
paid_through_date: nil,
payment_method_token: nil,
price: nil,
status: nil,
trial_duration: nil,
trial_duration_unit: nil,
trial_period: nil,
updated_at: nil,
add_ons: [],
discounts: [],
transactions: [],
status_history: []
@doc """
Create a subscription, or return an error response after failed
validation.
## Example
{:ok, sub} = Braintree.Subscription.create(%{
payment_method_token: card.token,
plan_id: "starter"
})
"""
@spec create(map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def create(params \\ %{}, opts \\ []) do
with {:ok, payload} <- HTTP.post("subscriptions", %{subscription: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
Find an existing subscription by `subscription_id`
## Example
{:ok, subscription} = Subscription.find("123")
"""
@spec find(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def find(subscription_id, opts \\ []) do
with {:ok, payload} <- HTTP.get("subscriptions/#{subscription_id}", opts) do
{:ok, new(payload)}
end
end
@doc """
Cancel an existing subscription by `subscription_id`. A cancelled subscription
cannot be reactivated, you would need to create a new one.
## Example
{:ok, subscription} = Subscription.cancel("123")
"""
@spec cancel(String.t(), Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def cancel(subscription_id, opts \\ []) do
with {:ok, payload} <- HTTP.put("subscriptions/#{subscription_id}/cancel", opts) do
{:ok, new(payload)}
end
end
@doc """
You can manually retry charging past due subscriptions.
By default, we will use the subscription balance when retrying the
transaction. If you would like to use a different amount you can optionally
specify the amount for the transaction.
A successful manual retry of a past due subscription will **always** reduce
the balance of that subscription to $0, regardless of the amount of the
retry.
## Example
{:ok, transaction} = Braintree.Subscription.retry_charge(sub_id)
{:ok, transaction} = Braintree.Subscription.retry_charge(sub_id, "24.00")
"""
@spec retry_charge(String.t()) :: {:ok, Transaction.t()}
@spec retry_charge(String.t(), String.t() | nil, Keyword.t()) ::
{:ok, Transaction.t()} | {:error, Error.t()}
# Delegates to Transaction.sale/2; a nil amount lets Braintree fall back
# to the subscription's outstanding balance.
def retry_charge(subscription_id, amount \\ nil, opts \\ []) do
Transaction.sale(%{amount: amount, subscription_id: subscription_id}, opts)
end
@doc """
To update a subscription, use its ID along with new attributes. The same
validations apply as when creating a subscription. Any attribute not passed will
remain unchanged.
## Example
{:ok, subscription} = Braintree.Subscription.update("subscription_id", %{
plan_id: "new_plan_id"
})
subscription.plan_id # "new_plan_id"
"""
@spec update(binary, map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def update(id, params, opts \\ []) when is_binary(id) and is_map(params) do
with {:ok, payload} <- HTTP.put("subscriptions/" <> id, %{subscription: params}, opts) do
{:ok, new(payload)}
end
end
@doc """
To search for subscriptions, pass a map of search parameters.
## Example:
{:ok, subscriptions} = Braintree.Subscription.search(%{plan_id: %{is: "starter"}})
"""
@spec search(map, Keyword.t()) :: {:ok, t} | {:error, Error.t()}
def search(params, opts \\ []) when is_map(params) do
Search.perform(params, "subscriptions", &new/1, opts)
end
@doc """
Convert a map into a Subscription struct. Add_ons and transactions
are converted to a list of structs as well.
## Example
subscripton = Braintree.Subscription.new(%{"plan_id" => "business",
"status" => "Active"})
"""
@spec new(map | [map]) :: t | [t]
def new(%{"subscription" => map}) do
new(map)
end
def new(map) when is_map(map) do
# `super/1` is the default constructor injected by `use
# Braintree.Construction`; it builds the base struct from the raw
# payload before the nested add_ons/transactions are converted.
subscription = super(map)
add_ons = AddOn.new(subscription.add_ons)
transactions = Transaction.new(subscription.transactions)
%{subscription | add_ons: add_ons, transactions: transactions}
end
def new(list) when is_list(list) do
Enum.map(list, &new/1)
end
end
|
lib/subscription.ex
| 0.906117
| 0.411318
|
subscription.ex
|
starcoder
|
defmodule CLL do
@moduledoc """
This module can be used to represent a data structure with similar behavior
as circular Doubly-Linked-List.
"But wait, aren't all Lists in Erlang Linked Lists?" Well yes, but they
are immutable, which makes things like removing elements while iterating
through the list very slow. Also, getting consistent CLL-like behaviour
from normal Lists is not easy when dealing with problems such as polygon
math around the beginning and end of the list.
Internally, it uses a Zipper data structure
(https://en.wikipedia.org/wiki/Zipper_(data_structure))
to keep the items before and
after the current item in a way that optimizes for moving forward and
backward in the list. Because the next and previous item are always the
first items in the surrounding lists, those operations are substantially
faster than tracking a cursor in a standard List and fetching its neighbors.
A list can be created by passing a List to the `init/1` function.
Once created, you can traverse through the list one or more steps at a time.
## Examples
iex> [1, 2, 3, 4, 5]
...> |> CLL.init()
...> |> CLL.value()
1
iex> [1, 2, 3, 4, 5]
...> |> CLL.init()
...> |> CLL.next()
...> |> CLL.value()
2
iex> [1, 2, 3, 4, 5]
...> |> CLL.init()
...> |> CLL.prev()
...> |> CLL.prev(3)
...> |> CLL.next(2)
...> |> CLL.value()
4
You can also modify the list by inserting, replacing, or removing the current
element. Finally, if desired, you can convert the CLL back into a List.
## Examples
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.next(2)
...> |> CLL.remove()
...> |> CLL.to_list()
[1, 2, 4, 5]
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.prev(2)
...> |> CLL.replace(:foo)
...> |> CLL.to_list()
[1, 2, 3, :foo, 5]
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.next(3)
...> |> CLL.insert(3.5)
...> |> CLL.insert(3.75)
...> |> CLL.to_list()
[1, 2, 3, 3.5, 3.75, 4, 5]
To help with use cases where iterating through the list once is useful, CLL
keeps track of the "start" of the list so that you can determine when a list
has been fully traversed. A list can also be reset to the initial start
position at any time.
## Examples
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.next(3)
...> |> CLL.prev(2)
...> |> CLL.next()
...> |> CLL.offset()
2
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.next(5)
...> |> CLL.done?()
true
iex> CLL.init([1, 2, 3, 4, 5])
...> |> CLL.next(4)
...> |> CLL.reset()
...> |> CLL.value()
1
"""
# A CLL is a zipper: {visited-in-reverse-order, remaining}. The current
# element is the head of the `remaining` list.
@type cll :: {list, list}
@type value :: any
@spec init(list) :: cll
def init(list) when is_list(list), do: {[], list}
@spec next(cll) :: cll
def next({[], []}), do: {[], []}
# End of the list reached: wrap around to the front (circular step).
def next({visited, []}) do
[a | remain] = Enum.reverse(visited)
{[a], remain}
end
def next({visited, [a | remain]}), do: {[a] ++ visited, remain}
@spec next(cll, number) :: cll
def next(state, 0), do: state
def next(state, 1), do: next(state)
def next(state, offset) when offset < 0, do: prev(state, -offset)
def next(state, offset), do: next(next(state), offset - 1)
@spec prev(cll) :: cll
def prev({[], []}), do: {[], []}
# At the front: wrap around to the end (circular step).
def prev({[], remain}), do: {Enum.reverse(remain), []} |> prev()
def prev({[a | visited], remain}), do: {visited, [a] ++ remain}
@spec prev(cll, number) :: cll
def prev(state, 0), do: state
def prev(state, 1), do: prev(state)
def prev(state, offset) when offset < 0, do: next(state, -offset)
def prev(state, offset), do: prev(prev(state), offset - 1)
@spec value(cll) :: any
@spec value(cll, number) :: any
# Peeks at the element `offset` positions away without moving the cursor.
# The first two clauses wrap out-of-range offsets around the total length
# (circular behavior); negative offsets index into the visited side.
def value(state, offset \\ 0)
def value({[], []}, _), do: nil
def value({_, remain} = state, offset) when offset >= length(remain),
do: value(state, offset - len(state))
def value({visited, _} = state, offset) when offset < -length(visited),
do: value(state, offset + len(state))
def value({visited, _}, offset) when offset < 0, do: Enum.at(visited, -offset - 1)
def value({_, remain}, offset), do: Enum.at(remain, offset)
@spec len(cll) :: non_neg_integer
def len({visited, remain}), do: length(visited) + length(remain)
@spec empty?(cll) :: boolean
def empty?({[], []}), do: true
def empty?(_), do: false
# Number of steps taken from the start position.
@spec offset(cll) :: non_neg_integer
def offset({visited, _}), do: length(visited)
@spec done?(cll) :: boolean
def done?({_, []}), do: true
def done?(_), do: false
@spec reset(cll) :: cll
def reset({_, _} = state) do
state |> to_list() |> init()
end
# Removes the current element; at the wrapped-around end position the
# element removed is the last one visited.
@spec remove(cll) :: cll
def remove({[], []}), do: {[], []}
def remove({visited, []}), do: {Enum.drop(visited, -1), []}
def remove({visited, [_ | remain]}), do: {visited, remain}
# Inserts before the current element; the cursor does not move past it.
@spec insert(cll, any) :: cll
def insert({visited, remain}, value), do: {[value | visited], remain}
@spec replace(cll, any) :: cll
def replace({[], []}, _), do: {[], []}
def replace({visited, []}, value), do: {Enum.drop(visited, -1) ++ [value], []}
def replace({visited, [_ | remain]}, value), do: {visited, [value | remain]}
@spec to_list(cll) :: list
def to_list({visited, remain}), do: visited |> Enum.reverse() |> Enum.concat(remain)
end
|
lib/cll.ex
| 0.876218
| 0.599866
|
cll.ex
|
starcoder
|
defmodule Plymio.Fontais do
  @moduledoc ~S"""
  `Plymio.Fontais` is a foundation / support package for the `Plymio` and `Harnais` package families.

  ## Documentation Terms

  In the documentation these terms, usually in *italics*, are used to mean the same thing (e.g. *opts*).

  ### *opts* and *opzioni*

  *opts* is a `Keyword` list. An *opzioni* is a list of *opts*.

  ### *form* and *forms*

  A *form* is a quoted form (ast). A *forms* is a list of zero, one or more *form*s.

  ### *result* and *results*

  A result is either `{:ok, any}` or `{:error, error}` where `error` is an `Exception`.

  A *results* is an enumerable of *result* e.g. a `List` or `Stream`.

  ## Standard Processing and Result Patterns

  Many functions return either `{:ok, value}` or `{:error, error}`
  where `error` will be an `Exception`.

  Peer bang functions return either the `value` or raise the `error`.

  There are three common function *patterns*:

  ### *pattern 0*

  A *pattern 0* function always returns either `{:ok, any}` or
  `{:error, error}`.

  If the function's processing (e.g. after calling other functions)
  does not produce either `{:ok, value}` or `{:error,
  error}`, a new `{:error, error}` will be created and returned.

  A *pattern 0* function is *pessimistic* i.e. it assumes the `value` is invalid, and
  an error has occurred.

  ### *pattern 1*

  A *pattern 1* function always returns either `{:ok, any}` or
  `{:error, error}`.

  If the function's processing (e.g. after calling other functions)
  produces a `value` that is neither `{:ok, any}` nor `{:error,
  error}`, the `value` is converted into `{:ok, value}` and returned.

  A *pattern 1* function is *optimistic* i.e. it assumes the
  `value` is valid.

  ### *pattern 2*

  A *pattern 2* function always returns either `{:ok, any}`, `{:error,
  error}`, `nil` or *the unset value*.

  See below for an explanation of *the unset value*.

  A *pattern 2* function works like *pattern 1* other than if
  the `value` is `nil` or *the unset value* (see below), it is
  returned unchanged.

  ## The Unset Value

  In many situations it is useful to know whether a var has been set
  explicitly but its value can be validly `nil`.

  For example the default value for a `struct` field is
  `nil`. But there is no way to determine whether the field has been
  set to `nil` or has never been set at all.

  *The Unset Value* is an arbitrary, randomish atom that can be used
  where `nil` can not e.g. as the default value for a field in a
  `struct`.

  See `Plymio.Fontais.Guard.the_unset_value/0`.
  """

  require Plymio.Fontais.Guard
  use Plymio.Fontais.Attribute

  # Shared type vocabulary used throughout the Plymio / Harnais families.
  @type form :: Macro.t()
  @type forms :: [form]
  @type key :: atom
  @type keys :: key | [key]
  @type alias_key :: key
  @type alias_keys :: keys
  @type alias_value :: nil | alias_keys
  @type aliases_kvs :: [{alias_key, alias_value}]
  @type aliases_tuples :: [{alias_key, alias_key}]
  @type aliases_dict :: %{optional(alias_key) => alias_key}
  @type kv :: {any, any}
  @type product :: [kv]
  @type opts :: Keyword.t()
  @type opzioni :: [opts]
  @type error :: struct
  @type result :: {:ok, any} | {:error, error}
  @type results :: [result]
  @type dict :: %{optional(alias_key) => any}
  @type fun1_map :: (any -> any)

  # Convenience delegates so callers only need this top-level module.
  @doc "Delegated to `Plymio.Fontais.Guard.the_unset_value/0`"
  @since "0.1.0"
  @spec the_unset_value() :: atom
  defdelegate the_unset_value(), to: Plymio.Fontais.Guard

  @doc "Delegated to `Plymio.Fontais.Guard.is_value_set/1`"
  @since "0.1.0"
  @spec is_value_set(any) :: boolean
  defdelegate is_value_set(value), to: Plymio.Fontais.Guard

  @doc "Delegated to `Plymio.Fontais.Guard.is_value_unset/1`"
  @since "0.1.0"
  @spec is_value_unset(any) :: boolean
  defdelegate is_value_unset(value), to: Plymio.Fontais.Guard

  @doc "Delegated to `Plymio.Fontais.Guard.is_value_unset_or_nil/1`"
  @since "0.1.0"
  @spec is_value_unset_or_nil(any) :: boolean
  defdelegate is_value_unset_or_nil(value), to: Plymio.Fontais.Guard
end
|
lib/fontais/fontais.ex
| 0.878731
| 0.744912
|
fontais.ex
|
starcoder
|
defmodule Logi.Sink do
  @moduledoc """
  Sinks.

  A sink holds the specification of a sink process (see `Logi.SinkProc`),
  and a sink process manages a sink writer (see `Logi.SinkWriter`).

  See the documentation of the `Logi.BuiltIn.Sink.*` modules for usage examples.
  """

  @typedoc "A sink"
  @opaque sink :: :logi_sink.sink

  @typedoc """
  The identifier of a sink.
  The scope of an identifier is limited in siblings with the same parent.
  """
  @type id :: any

  @typedoc """
  The specification of a sink process.
  See Erlang official documents of `:supervisor` for more information.
  NOTE: `:restart` field is ignored (always regarded as `:permanent`).
  """
  @type spec :: :supervisor.child_spec

  @typedoc """
  The supervise flags of a sink process.
  See Erlang official documents of `:supervisor` for more information.
  NOTE: `:strategy` field is ignored.
  """
  @type sup_flags :: :supervisor.sup_flags

  # All operations are thin wrappers over the Erlang `:logi_sink` module,
  # expressed here as delegates.

  @doc "Creates a new sink."
  @spec new(spec, sup_flags) :: sink
  defdelegate new(spec, flags \\ %{}), to: :logi_sink

  @doc """
  Creates a sink from standalone a writer instance.
  No specific sink process is needed by `writer` to write log messages.
  """
  @spec from_writer(Logi.Sink.id, Logi.SinkWriter.writer) :: sink
  defdelegate from_writer(sink_id, writer), to: :logi_sink

  @doc "Returns `true` if `x` is a `t:sink/0` value, `false` otherwise."
  @spec sink?(any) :: boolean
  defdelegate sink?(x), to: :logi_sink, as: :is_sink

  @doc "Equivalent to `Logi.Sink.get_spec(sink)[:id]`."
  @spec get_id(sink) :: id
  defdelegate get_id(sink), to: :logi_sink

  @doc """
  Gets the process specification of `sink`.
  The type of the return value is always map.
  """
  @spec get_spec(sink) :: spec
  defdelegate get_spec(sink), to: :logi_sink

  @doc """
  Gets the supervise flags of `sink`.
  The type of the return value is always map.
  """
  @spec get_sup_flags(sink) :: sup_flags
  defdelegate get_sup_flags(sink), to: :logi_sink
end
|
lib/logi/sink.ex
| 0.843992
| 0.460956
|
sink.ex
|
starcoder
|
defmodule Oli.Delivery.ExtrinsicState do
  @moduledoc """
  Enables arbitrary key-value pair storage that is extrinsic to any specific activity attempt.

  Extrinsic state exists either truly global for a user, or scoped to a course section for
  a user. Leveraging this, activities can be built that share state across pages in a course, and
  across courses.

  The fundamental operations on extrinsic state are read, upsert, and deletion. Each operation
  works on a collection of keys (or key value pairs).
  """
  import Ecto.Query, warn: false

  alias Oli.Accounts
  alias Oli.Delivery.Sections
  alias Oli.Delivery.Attempts.Core, as: Attempts
  alias Phoenix.PubSub

  @doc """
  Reads extrinsic state for a user for a specific section. Returns {:ok, map} of the keys and their
  values.

  The optional `keys` parameter is a MapSet of the string key names to retrieve. If this
  argument is not specified then all keys are returned, otherwise the return value is a map of
  key value pairs filtered to this MapSet.
  """
  def read_section(user_id, section_slug, keys \\ nil) do
    case Sections.get_enrollment(section_slug, user_id) do
      nil ->
        {:error, {:not_found}}

      e ->
        {:ok, filter_keys(e.state, keys)}
    end
  end

  @doc """
  Reads extrinsic state for a user from the global context. Returns {:ok, map} of the keys and their
  values.

  The optional `keys` parameter is a MapSet of the string key names to retrieve. If this
  argument is not specified then all keys are returned, otherwise the return value is a map of
  key value pairs filtered to this MapSet.
  """
  def read_global(user_id, keys \\ nil) do
    case Accounts.get_user_by(id: user_id) do
      nil -> {:error, {:not_found}}
      user -> {:ok, filter_keys(user.state, keys)}
    end
  end

  @doc """
  Reads extrinsic state for a user from a resource attempt context. Returns {:ok, map} of the keys and their
  values.

  The optional `keys` parameter is a MapSet of the string key names to retrieve. If this
  argument is not specified then all keys are returned, otherwise the return value is a map of
  key value pairs filtered to this MapSet.
  """
  def read_attempt(attempt_guid, keys \\ nil) do
    case Attempts.get_resource_attempt_by(attempt_guid: attempt_guid) do
      nil -> {:error, {:not_found}}
      attempt -> {:ok, filter_keys(attempt.state, keys)}
    end
  end

  @doc """
  Updates or inserts key value pairs into the extrinsic state for a user for a particular section.
  Returns {:ok, map} of the new updated state.
  """
  def upsert_section(user_id, section_slug, key_values) do
    case Sections.get_enrollment(section_slug, user_id) do
      nil ->
        {:error, {:not_found}}

      e ->
        case Sections.update_enrollment(e, %{state: Map.merge(e.state, key_values)}) do
          {:ok, u} ->
            notify_section(user_id, section_slug, :delta, key_values)
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  @doc """
  Updates or inserts key value pairs into the extrinsic state for a user for the global context.
  Returns {:ok, map} of the new updated state.
  """
  def upsert_global(user_id, key_values) do
    case Accounts.get_user_by(id: user_id) do
      nil ->
        {:error, {:not_found}}

      user ->
        case Accounts.update_user(user, %{state: Map.merge(user.state, key_values)}) do
          {:ok, u} ->
            notify_global(user_id, :delta, key_values)
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  @doc """
  Updates or inserts key value pairs into the extrinsic state for a user for an attempt context.
  Returns {:ok, map} of the new updated state.
  """
  def upsert_attempt(attempt_guid, key_values) do
    case Attempts.get_resource_attempt_by(attempt_guid: attempt_guid) do
      nil ->
        {:error, {:not_found}}

      attempt ->
        # Note: unlike the section/global variants, attempt-scoped upserts do
        # not broadcast a PubSub notification.
        case Attempts.update_resource_attempt(attempt, %{
               state: Map.merge(attempt.state, key_values)
             }) do
          {:ok, u} ->
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  @doc false
  @deprecated "Misspelled; use upsert_attempt/2 instead"
  def upsert_atttempt(attempt_guid, key_values), do: upsert_attempt(attempt_guid, key_values)

  @doc """
  Deletes one or more keys from the extrinsic state for a user for the global context. The
  keys are specified as a MapSet of string key names.
  Returns {:ok, map} of the new updated state.
  """
  def delete_global(user_id, keys) do
    case Accounts.get_user_by(id: user_id) do
      nil ->
        {:error, {:not_found}}

      user ->
        case Accounts.update_user(user, %{state: delete_keys(user.state, keys)}) do
          {:ok, u} ->
            notify_global(user_id, :deletion, MapSet.to_list(keys))
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  @doc """
  Deletes one or more keys from the extrinsic state for a user for an attempt context. The
  keys are specified as a MapSet of string key names.
  Returns {:ok, map} of the new updated state.
  """
  def delete_attempt(attempt_guid, keys) do
    case Attempts.get_resource_attempt_by(attempt_guid: attempt_guid) do
      nil ->
        {:error, {:not_found}}

      attempt ->
        case Attempts.update_resource_attempt(attempt, %{state: delete_keys(attempt.state, keys)}) do
          {:ok, u} ->
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  @doc """
  Deletes one or more keys from the extrinsic state for a user for a particular section. The
  keys are specified as a MapSet of string key names.
  Returns {:ok, map} of the new updated state.
  """
  def delete_section(user_id, section_slug, keys) do
    case Sections.get_enrollment(section_slug, user_id) do
      nil ->
        {:error, {:not_found}}

      e ->
        case Sections.update_enrollment(e, %{state: delete_keys(e.state, keys)}) do
          {:ok, u} ->
            notify_section(user_id, section_slug, :deletion, MapSet.to_list(keys))
            {:ok, u.state}

          e ->
            e
        end
    end
  end

  # Returns `state` restricted to the keys in the MapSet `keys`; `nil` means
  # "return everything".
  defp filter_keys(state, nil), do: state
  defp filter_keys(state, keys), do: Map.take(state, MapSet.to_list(keys))

  # Removes the keys in the MapSet `keys` from `state`.
  defp delete_keys(state, keys), do: Map.drop(state, MapSet.to_list(keys))

  # Broadcast a state change (`:delta` or `:deletion`) for a user's global state.
  defp notify_global(user_id, action, payload) do
    PubSub.broadcast(
      Oli.PubSub,
      "user_global_state:" <> Integer.to_string(user_id),
      {action, payload}
    )
  end

  # Broadcast a state change (`:delta` or `:deletion`) for a user's section state.
  defp notify_section(user_id, section_slug, action, payload) do
    PubSub.broadcast(
      Oli.PubSub,
      "user_section_state:" <> section_slug <> ":" <> Integer.to_string(user_id),
      {action, payload}
    )
  end
end
|
lib/oli/delivery/extrinsic_state.ex
| 0.887021
| 0.53206
|
extrinsic_state.ex
|
starcoder
|
defmodule P2 do
  @moduledoc """
  Solves the game by prime factorization followed by a Nim analysis of the
  prime exponents (see `solve/1`).
  """
  # NOTE(review): `use Bitwise` is deprecated in newer Elixir in favor of
  # `import Bitwise` — confirm the project's Elixir version before changing.
  use Bitwise, only_operators: true

  defmodule Factorization do
    # Iterator over prime candidates; `value` is the current prime, starting at 2.
    defstruct value: 2

    # Advance to the next prime after the current one. After 2 the search
    # proceeds over odd numbers only.
    def next(factor \\ %__MODULE__{value: 2})
    def next(%__MODULE__{value: 2}), do: %__MODULE__{value: 3}
    def next(%__MODULE__{value: n}) do
      if is_prime(n + 2) do
        %__MODULE__{value: n + 2}
      else
        next(%__MODULE__{value: n + 2})
      end
    end

    @doc """
    Determines whether a number is prime.

    # Examples

        iex> P2.Factorization.is_prime(1)
        false
        iex> [2, 3, 5, 7, 11, 13, 17, 19]
        ...> |> Enum.map(&P2.Factorization.is_prime/1)
        [true, true, true, true, true, true, true, true]
        iex> P2.Factorization.is_prime(4)
        false
        iex> P2.Factorization.is_prime(24)
        false
        iex> P2.Factorization.is_prime(58)
        false
    """
    def is_prime(n)
    def is_prime(n) when n < 2, do: false
    def is_prime(2), do: true
    def is_prime(3), do: true
    def is_prime(n) when 3 < n do
      if rem(n, 2) == 0 do
        false
      else
        # Trial division by odd candidates starting at 3.
        is_prime(n, 3)
      end
    end

    # Trial division: checks odd divisors i while i * i <= n.
    defp is_prime(n, i) when i * i <= n do
      if rem(n, i) == 0 do
        false
      else
        is_prime(n, i + 2)
      end
    end
    defp is_prime(_, _), do: true

    # Prime-factorize `n` into a list of `{prime, exponent}` tuples.
    def to_list(n, factor \\ %Factorization{}, combinations \\ [])
    def to_list(1, _, combinations), do: combinations
    def to_list(n, %{value: prime} = factor, combinations) do
      if Factorization.is_prime(n) do
        # The remaining cofactor is itself prime: record it with exponent 1.
        [{n, 1} | combinations]
      else
        if rem(n, prime) == 0 do
          {n0, base} = to_exponent(n, prime)
          to_list(n0, Factorization.next(factor), [{prime, base} | combinations])
        else
          to_list(n, Factorization.next(factor), combinations)
        end
      end
    end

    # Divides `factor` out of `n` repeatedly; returns the remaining cofactor
    # and how many times the division succeeded (the exponent).
    defp to_exponent(n, factor, count \\ 0)
    defp to_exponent(n, factor, count) do
      if rem(n, factor) == 0 do
        to_exponent(div(n, factor), factor, count + 1)
      else
        {n, count}
      end
    end
  end

  def main do
    IO.read(:line) |> String.trim() |> String.to_integer() |> solve() |> IO.puts()
  end

  @doc """
  Prime-factorizes `n` and plays a Nim game over the prime exponents:
  "Alice" (the first player) wins iff the XOR of all exponents is non-zero.

  ## Examples

      iex> P2.solve(4)
      "Alice"
      iex> P2.solve(11)
      "Alice"
      iex> P2.solve(24)
      "Alice"
      iex> P2.solve(600)
      "Bob"
  """
  def solve(n) when 2 <= n and n <= 100_000_000 do
    n
    |> Factorization.to_list()
    |> Enum.reduce(nil, fn
      {_prime_number, number}, nil ->
        number

      {_prime_number, number}, acc ->
        # `^^^` is bitwise XOR; classic Nim: the first player loses iff the
        # xor-sum of the pile sizes (here, the exponents) is 0.
        acc ^^^ number
    end)
    |> Kernel.==(0)
    |> if do
      "Bob"
    else
      "Alice"
    end
  end
end
"""
defmodule Main do
use Bitwise, only_operators: true
defmodule Factorization do
defstruct value: 2
def next(factor \\ %__MODULE__{value: 2})
def next(%__MODULE__{value: 2}), do: %__MODULE__{value: 3}
def next(%__MODULE__{value: n}) do
if is_prime(n + 2) do
%__MODULE__{value: n + 2}
else
next(%__MODULE__{value: n + 2})
end
end
def is_prime(n)
def is_prime(n) when n < 2, do: false
def is_prime(2), do: true
def is_prime(3), do: true
def is_prime(n) when 3 < n do
if rem(n, 2) == 0 do
false
else
is_prime(n, 3)
end
end
defp is_prime(n, i) when i * i <= n do
if rem(n, i) == 0 do
false
else
is_prime(n, i + 2)
end
end
defp is_prime(_, _), do: true
def to_list(n, factor \\ %Factorization{}, combinations \\ [])
def to_list(1, _, combinations), do: combinations
def to_list(n, %{value: prime} = factor, combinations) do
if Factorization.is_prime(n) do
[{n, 1} | combinations]
else
if rem(n, prime) == 0 do
{n0, base} = to_exponent(n, prime)
to_list(n0, Factorization.next(factor), [{prime, base} | combinations])
else
to_list(n, Factorization.next(factor), combinations)
end
end
end
defp to_exponent(n, factor, count \\ 0)
defp to_exponent(n, factor, count) do
if rem(n, factor) == 0 do
to_exponent(div(n, factor), factor, count + 1)
else
{n, count}
end
end
end
def main do
IO.read(:line) |> String.trim() |> String.to_integer() |> solve() |> IO.puts()
end
def solve(n) when 2 <= n and n <= 100_000_000 do
n
|> Factorization.to_list()
|> Enum.reduce(nil, fn
{_prime_number, number}, nil ->
number
{_prime_number, number}, acc ->
acc ^^^ number
end)
|> Kernel.==(0)
|> if do
"Bob"
else
"Alice"
end
end
end
"""
|
lib/100/p2.ex
| 0.648466
| 0.564819
|
p2.ex
|
starcoder
|
defmodule HomeBot.DataStore.TemperatureStore do
  @moduledoc "The datastore for weather data"

  alias HomeBot.DataStore.InfluxConnection

  # Ensure the backing "energy" database exists (CREATE DATABASE is idempotent).
  def create_database_if_not_exists do
    InfluxConnection.query(
      "CREATE DATABASE energy",
      method: :post
    )
  end

  # Persist a batch of weather records, skipping entries without a temperature.
  def write_temperature_data(data) do
    points =
      for record <- data, not is_nil(record[:temperature]) do
        to_datapoint(record)
      end

    :ok =
      InfluxConnection.write(%{
        points: points,
        database: "energy"
      })
  end

  # Fetch the single most recent measurement as a map of column name => value,
  # with the "time" column converted to the local timezone.
  def get_latest_weather_data do
    %{results: results} =
      InfluxConnection.query(
        "SELECT * FROM temperature GROUP BY * ORDER BY DESC LIMIT 1",
        database: "energy"
      )

    %{series: [series]} = List.first(results)

    series.columns
    |> Enum.zip(List.first(series.values))
    |> Map.new()
    |> Map.update!("time", &to_timezone/1)
  end

  # Daily mean temperature over the whole history (:all) or the last `days` days.
  def get_average_temperature_per_day(:all) do
    "SELECT MEAN(temperature) as temperature FROM temperature GROUP BY time(1d)"
    |> InfluxConnection.get_list("energy")
    |> fix_timezone()
  end

  def get_average_temperature_per_day(days) do
    "SELECT MEAN(temperature) as temperature FROM temperature WHERE time >= now() -#{days}d GROUP BY time(1d)"
    |> InfluxConnection.get_list("energy")
    |> fix_timezone()
  end

  # Mean temperature between two timestamps (exclusive upper bound).
  def get_average_temperature(start_time, end_time) do
    "SELECT MEAN(temperature) as temperature FROM temperature WHERE time >= '#{start_time}' AND time < '#{end_time}'"
    |> InfluxConnection.get_single("energy")
    |> Map.update("time", Timex.now(), &to_timezone/1)
  end

  # Shape a raw record into an InfluxDB point.
  defp to_datapoint(record) do
    %{
      database: "energy",
      measurement: "temperature",
      fields: %{
        # Dividing by 1 casts integers to floats so the field type stays consistent.
        temperature: record[:temperature] / 1,
        humidity: record[:humidity],
        precipitation: (record[:precipitation] || 0) / 1,
        wind_direction: record[:wind_direction],
        wind_speed: (record[:wind_speed] || 0) / 1
      },
      timestamp: DateTime.to_unix(record[:timestamp], :nanosecond)
    }
  end

  # Convert the "time" column of every record to the local timezone.
  defp fix_timezone(records) do
    Enum.map(records, fn entry ->
      Map.update!(entry, "time", &to_timezone/1)
    end)
  end

  # Reinterpret an ISO8601 naive timestamp in the Amsterdam timezone.
  defp to_timezone(time) do
    {:ok, naive} = NaiveDateTime.from_iso8601(time)

    naive
    |> Timex.to_datetime("Europe/Amsterdam")
    |> DateTime.to_iso8601()
  end
end
|
lib/home_bot/data_store/temperature_store.ex
| 0.801431
| 0.547706
|
temperature_store.ex
|
starcoder
|
defmodule Oban.Peer do
  @moduledoc """
  The `Peer` module maintains leadership for a particular Oban instance within a cluster.

  Leadership is used by plugins, primarily, to prevent duplicate work across nodes. For example,
  only the leader's `Cron` plugin will insert new jobs. You can use peer leadership to extend Oban
  with custom plugins, or even within your own application.

  Note a few important details about how peer leadership operates:

  * Each peer checks for leadership at a 30 second interval. When the leader exits it broadcasts a
    message to all other peers to encourage another one to assume leadership.

  * Each Oban instance supervises a distinct `Oban.Peer` instance. That means that with multiple
    Oban instances on the same node one instance may be the leader, while the others aren't.

  * Without leadership, global plugins (Cron, Lifeline, Stager, etc.), will not run on any node.

  ## Available Peer Implementations

  There are two built-in peering modules:

  * `Oban.Peers.Postgres` — uses table-based leadership through the `oban_peers` table and works
    in any environment, with or without clustering. This is the default.

  * `Oban.Peers.Global` — coordinates global locks through distributed Erlang, requires
    distributed Erlang.

  ## Examples

  Check leadership for the default Oban instance:

      Oban.Peer.leader?()
      # => true

  That is identical to using the name `Oban`:

      Oban.Peer.leader?(Oban)
      # => true

  Check leadership for a couple of instances:

      Oban.Peer.leader?(Oban.A)
      # => true

      Oban.Peer.leader?(Oban.B)
      # => false
  """

  alias Oban.{Config, Registry}

  # Options accepted by a peer implementation's `start_link/1`.
  @type option ::
          {:name, module()}
          | {:conf, Config.t()}
          | {:interval, timeout()}

  @doc """
  Starts a peer instance.
  """
  @callback start_link([option()]) :: GenServer.on_start()

  @doc """
  Check whether the current peer instance leads the cluster.
  """
  @callback leader?(pid()) :: boolean()

  @doc """
  Check whether the current instance leads the cluster.

  ## Example

  Check leadership for the default Oban instance:

      Oban.Peer.leader?()
      # => true

  Check leadership for an alternate instance named `Oban.Private`:

      Oban.Peer.leader?(Oban.Private)
      # => true
  """
  @spec leader?(Config.t() | GenServer.server()) :: boolean()
  def leader?(conf_or_name \\ Oban)

  def leader?(%Config{} = conf) do
    # Look up the registered peer process for this instance; if it isn't
    # running, report "not the leader" rather than raising.
    case Registry.whereis(conf.name, Oban.Peer) do
      pid when is_pid(pid) ->
        conf.peer.leader?(pid)

      nil ->
        false
    end
  end

  def leader?(name) do
    # Resolve an instance name to its config, then check leadership via the
    # config clause above.
    name
    |> Oban.config()
    |> leader?()
  end

  @doc false
  @spec child_spec(Keyword.t()) :: Supervisor.child_spec()
  def child_spec(opts) do
    conf = Keyword.fetch!(opts, :conf)
    # Default the child id/name to the configured peer module unless a :name
    # was explicitly provided.
    opts = Keyword.put_new(opts, :name, conf.peer)

    %{id: opts[:name], start: {conf.peer, :start_link, [opts]}}
  end
end
|
lib/oban/peer.ex
| 0.894679
| 0.579847
|
peer.ex
|
starcoder
|
defmodule Engine.Predictions do
  @moduledoc """
  Maintains an up-to-date internal state of the realtime predictions of vehicles
  in the system. Fetches from the GTFS-rt enhanced JSON file about once per
  second.

  Offers a `for_stop/1` public interface to get a list of Predictions.Prediction's
  for a given GTFS stop.
  """
  use GenServer
  require Logger

  # GenServer state: the Last-Modified headers from the previous successful
  # downloads (used for If-Modified-Since conditional requests) plus the ETS
  # table holding parsed trip updates.
  @type state :: %{
          last_modified_trip_updates: String.t() | nil,
          last_modified_vehicle_positions: String.t() | nil,
          trip_updates_table: :ets.tab()
        }

  @trip_updates_table :trip_updates

  def start_link do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  @doc "The upcoming predicted times a vehicle will be at this stop"
  # NOTE(review): this spec covers only the 2-argument form; the default table
  # argument also makes a 3-argument form available (used in tests, presumably).
  @spec for_stop(String.t(), 0 | 1) :: [Predictions.Prediction.t()]
  def for_stop(predictions_table_id \\ @trip_updates_table, gtfs_stop_id, direction_id) do
    # `:none` is a tombstone written by the updater for stops that no longer
    # have predictions; treat it the same as a missing entry.
    case :ets.lookup(predictions_table_id, {gtfs_stop_id, direction_id}) do
      [{_, :none}] -> []
      [{{^gtfs_stop_id, ^direction_id}, predictions}] -> predictions
      _ -> []
    end
  end

  def init(_) do
    schedule_update(self())

    # Matching on @trip_updates_table asserts that :ets.new returned the
    # expected named-table reference.
    @trip_updates_table =
      :ets.new(@trip_updates_table, [:set, :protected, :named_table, read_concurrency: true])

    {:ok,
     %{
       last_modified_trip_updates: nil,
       last_modified_vehicle_positions: nil,
       trip_updates_table: @trip_updates_table
     }}
  end

  def handle_info(:update, state) do
    # Schedule the next poll up front so the ~1s cadence doesn't drift with
    # processing time.
    schedule_update(self())
    current_time = Timex.now()

    {last_modified_trip_updates, vehicles_running_revenue_trips} =
      download_and_process_trip_updates(
        state[:last_modified_trip_updates],
        current_time,
        :trip_update_url,
        state[:trip_updates_table]
      )

    {last_modified_vehicle_positions, stops_with_trains} =
      download_and_process_vehicle_positions(
        state[:last_modified_vehicle_positions],
        :vehicle_positions_url
      )

    # Only advance the cached Last-Modified values when both downloads yielded
    # data; otherwise keep the old state and retry on the next tick.
    if vehicles_running_revenue_trips != nil && stops_with_trains do
      Engine.Departures.update_train_state(
        stops_with_trains,
        vehicles_running_revenue_trips,
        current_time
      )

      {:noreply,
       %{
         state
         | last_modified_trip_updates: last_modified_trip_updates,
           last_modified_vehicle_positions: last_modified_vehicle_positions
       }}
    else
      {:noreply, state}
    end
  end

  def handle_info(msg, state) do
    # Catch-all so stray messages don't crash the server.
    Logger.info("#{__MODULE__} unknown message: #{inspect(msg)}")
    {:noreply, state}
  end

  # Download the trip-updates feed (if modified) and refresh the ETS table.
  # Returns the new Last-Modified header and the set of vehicles on revenue
  # trips, or `{old_last_modified, nil}` on failure.
  @spec download_and_process_trip_updates(
          String.t() | nil,
          DateTime.t(),
          atom,
          :ets.tab()
        ) :: {String.t() | nil, any()}
  defp download_and_process_trip_updates(
         last_modified,
         current_time,
         url,
         ets_table
       ) do
    full_url = Application.get_env(:realtime_signs, url)

    case download_data(full_url, last_modified) do
      {:ok, body, new_last_modified} ->
        {new_predictions, vehicles_running_revenue_trips} =
          body
          |> Predictions.Predictions.parse_json_response()
          |> Predictions.Predictions.get_all(current_time)

        # Reset every previously-stored key to the :none tombstone, then
        # overlay the fresh predictions — keys absent from the new feed are
        # thereby cleared instead of left stale.
        existing_predictions =
          :ets.tab2list(ets_table) |> Enum.map(&{elem(&1, 0), :none}) |> Map.new()

        all_predictions = Map.merge(existing_predictions, new_predictions)
        :ets.insert(ets_table, Enum.into(all_predictions, []))
        {new_last_modified, vehicles_running_revenue_trips}

      :error ->
        {last_modified, nil}
    end
  end

  # Download the vehicle-positions feed (if modified) and map stopped trains
  # to their stops. Returns `{old_last_modified, nil}` on failure.
  @spec download_and_process_vehicle_positions(String.t() | nil, atom()) ::
          {String.t() | nil, %{String.t() => String.t()} | nil}
  defp download_and_process_vehicle_positions(last_modified, url) do
    full_url = Application.get_env(:realtime_signs, url)

    case download_data(full_url, last_modified) do
      {:ok, body, new_last_modified} ->
        {new_last_modified, vehicle_positions_response_to_stops_with_trains(body)}

      :error ->
        {last_modified, nil}
    end
  end

  # HTTP GET with If-Modified-Since caching. Any non-2xx response (including
  # 304 Not Modified) yields :error, which callers treat as "keep old data".
  @spec download_data(String.t(), String.t() | nil) ::
          {:ok, String.t(), String.t() | nil} | :error
  defp download_data(full_url, last_modified) do
    http_client = Application.get_env(:realtime_signs, :http_client)

    case http_client.get(
           full_url,
           if last_modified do
             [{"If-Modified-Since", last_modified}]
           else
             []
           end,
           timeout: 2000,
           recv_timeout: 2000
         ) do
      {:ok, %HTTPoison.Response{body: body, status_code: status, headers: headers}}
      when status >= 200 and status < 300 ->
        # Remember the server's Last-Modified header for the next request.
        case Enum.find(headers, fn {header, _value} -> header == "Last-Modified" end) do
          {"Last-Modified", last_modified} -> {:ok, body, last_modified}
          _ -> {:ok, body, nil}
        end

      {:ok, %HTTPoison.Response{}} ->
        :error

      {:error, %HTTPoison.Error{reason: reason}} ->
        # NOTE(review): Logger.warn/1 is deprecated in favor of
        # Logger.warning/1 in recent Elixir — confirm the project's Elixir
        # version before changing.
        Logger.warn("Could not fetch file from #{inspect(full_url)}: #{inspect(reason)}")
        :error
    end
  end

  defp schedule_update(pid) do
    Process.send_after(pid, :update, 1_000)
  end

  # Decode the vehicle positions feed into a map of stop_id => vehicle id for
  # vehicles currently STOPPED_AT a stop; returns %{} on malformed JSON.
  @spec vehicle_positions_response_to_stops_with_trains(String.t()) :: %{String.t() => String.t()}
  defp vehicle_positions_response_to_stops_with_trains(response) do
    try do
      response
      |> Jason.decode!()
      |> Map.get("entity")
      |> Enum.filter(fn vehicle_position ->
        get_in(vehicle_position, ["vehicle", "current_status"]) == "STOPPED_AT"
      end)
      |> Map.new(fn vehicle_position ->
        {get_in(vehicle_position, ["vehicle", "stop_id"]),
         get_in(vehicle_position, ["vehicle", "vehicle", "id"])}
      end)
    rescue
      Jason.DecodeError -> %{}
    end
  end
end
|
lib/engine/predictions.ex
| 0.748214
| 0.535341
|
predictions.ex
|
starcoder
|
% NOTE(review): this module uses pre-1.0 ("historic") Elixir syntax — the
% `module` keyword, `%`-style comments, and `'atom` literals. It will not
% compile with modern Elixir; kept byte-identical apart from these comments.
module Atom
  % Returns true if an atom representation of the given string exists.
  def exists?(string)
    try
      % binary_to_existing_atom raises badarg when no such atom was ever created.
      Erlang.binary_to_existing_atom(string.to_bin, 'utf8)
      true
    catch 'error: 'badarg
      false
    end
  end

  % Builds an atom from an Erlang char list.
  def from_char_list(list)
    Erlang.list_to_atom(list)
  end

  module Behavior
    % Returns a version of this atom that is a representation of itself.
    %
    % ## Examples
    %
    %     'a.inspect % => "'a"
    %     'A.inspect % => "'A"
    %
    def inspect
      inspect(self)
    end

    % Convert an atom to a char list.
    %
    % ## Examples
    %
    %     'a.to_char_list % => [97]
    %     'A.to_char_list % => [65]
    %
    def to_char_list
      Erlang.atom_to_list(self)
    end

    % Convert an atom to a string.
    %
    % ## Examples
    %
    %     'a.to_s % => "a"
    %     'A.to_s % => "A"
    %
    def to_s
      Erlang.atom_to_binary(self, 'utf8)
    end

    % Converts the given atom to a constant with the same name.
    % Raises a no_module error if the constant does not exist.
    %
    % ## Examples
    %
    %     'Atom.to_module % => Atom
    %
    def to_module
      Erlang.elixir_constants.lookup(self)
    end

    % A convenience method that returns a tuple representing a method.
    % The syntax was borrowed from Erlang.
    %
    % ## Examples
    %
    %     'a/2 % => { 'a, 2 }
    %
    def /(arity)
      {self, arity}
    end

    % Sends a message to the given that represents an registered process.
    % This is the equivalent to Erlang's ! and is also aliased as `deliver`.
    def <-(message)
      Erlang.send(self, message)
    end
    alias_local '<-, 'dispatch, 1

    private

    % Special-cased atoms whose inspected form has no leading quote.
    def inspect(nil)
      "nil"
    end

    def inspect(true)
      "true"
    end

    def inspect(false)
      "false"
    end

    % Quote the atom's name when it is not a plain identifier.
    def inspect(other)
      bin = Erlang.atom_to_binary(other, 'utf8)
      if ~r"\A@?(\w|::)*[?!]?\z".match?(bin)
        <<$', bin|binary>>
      else
        <<$', $\", bin|binary, $\">>
      end
    end
  end
end
|
lib/atom.ex
| 0.605682
| 0.658051
|
atom.ex
|
starcoder
|
defmodule Zachaeus.License do
@moduledoc """
A Zachaeus license contains all relevant data which is essential for a simple licensing system.
Due to the nature of this license, it can be used without a database, if you simply want to verify the validity of a license.
"""
alias Zachaeus.Error
## -- MODULE ATTRIBUTES
@default_timezone "Etc/UTC"
@separator_regex ~r/\|/
## -- STRUCT DATA
defstruct identifier: nil, plan: nil, valid_from: DateTime.utc_now(), valid_until: DateTime.utc_now()
@typedoc """
The license in the default format.
## License data
- `identifier` represents a user or any other entity etc.
- `plan` represents a specifc plan, e.g. to implement a varying behaviour of the application
- `valid_from` represents the beginning of the license
- `valid_until` represents the ending of the license
"""
@type t() :: %__MODULE__{
identifier: String.t(),
plan: String.t(),
valid_from: DateTime.t(),
valid_until: DateTime.t()
}
@typedoc """
The license in a serialized format.
## License encoding format
- The license data (identifier, plan, valid_from, valid_until) is separated by a `|` (pipe).
- None of the given license data is allowed to include a `|` (pipe) symbol.
- All timestamps are encoded in unix format within the UTC timezone.
## Example
Format: [<identifier>|<plan>|<valid_from>|<valid_until>]
Example: "my_user_id_1|default|1542279600|1573815600"
"""
@type serialized() :: String.t()
@typedoc """
The serialized, signed and encoded license.
## Signed license string
- The license is serialized, signed and an encoded string which contains the license data
- The first 64 byte of the signed license string represents the verification hash
## Example
Format: "VGVzdAJxQsXSrgYBkcwiOnWamiattqhhhNN_1jsY-LR_YbsoYpZ18-ogVSxWv7d8DlqzLSz9csqNtSzDk4y0JV5xaAE"
"""
@type signed() :: String.t()
## -- FUNCTIONS
@doc """
Serializes a license struct into the serialized license string format.
Before serializing, it does a validation of the given license data.
## Examples
iex> Zachaeus.License.serialize(%Zachaeus.License{identifier: 1, plan: "default", valid_from: "invalid datetime", valid_until: ~U[2019-11-15 11:00:00Z]})
{:error, %Zachaeus.Error{code: :invalid_timestamp_type, message: "Unable to cast timestamp to DateTime"}}
iex> Zachaeus.License.serialize(%Zachaeus.License{identifier: nil, plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-15 11:00:00Z]})
{:error, %Zachaeus.Error{code: :empty_identifier, message: "The given identifier cannot be empty"}}
iex> Zachaeus.License.serialize(%Zachaeus.License{identifier: 1, plan: nil, valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-15 11:00:00Z]})
{:error, %Zachaeus.Error{code: :empty_plan, message: "The given plan cannot be empty"}}
iex> Zachaeus.License.serialize(%Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-15 11:00:00Z]})
{:ok, "my_user_id_1|default|1542279600|1573815600"}
"""
@spec serialize(__MODULE__.t()) :: {:ok, __MODULE__.serialized()} | {:error, Zachaeus.Error.t()}
def serialize(%__MODULE__{identifier: raw_identifier, plan: raw_plan, valid_from: raw_valid_from, valid_until: raw_valid_until}) do
with {:ok, casted_identifier} <- cast_string(raw_identifier),
{:ok, casted_plan} <- cast_string(raw_plan),
{:ok, casted_valid_from} <- cast_datetime(raw_valid_from),
{:ok, casted_valid_until} <- cast_datetime(raw_valid_until),
{:ok, identifier} <- validate_identifier(casted_identifier),
{:ok, plan} <- validate_plan(casted_plan),
{:ok, valid_from, valid_until} <- validate_timerange(casted_valid_from, casted_valid_until) do
{:ok, "#{identifier}|#{plan}|#{DateTime.to_unix(valid_from)}|#{DateTime.to_unix(valid_until)}"}
end
end
def serialize(_invalid_license), do: {:error, %Error{code: :invalid_license_type, message: "Unable to serialize license due to an invalid type"}}
@doc """
Deserializes a given license string, which was previously serialized, into a license struct.
After deserializing, it does a validation of the given license data.
## Examples
iex> Zachaeus.License.deserialize("my_user_id_1|default|invalid datetime|1573815600")
{:error, %Zachaeus.Error{code: :invalid_timestamp_type, message: "Unable to cast timestamp to DateTime"}}
iex> Zachaeus.License.deserialize(" |default|1542279600|1573815600")
{:error, %Zachaeus.Error{code: :empty_identifier, message: "The given identifier cannot be empty"}}
iex> Zachaeus.License.deserialize("my_user_id_1| |1542279600|1573815600")
{:error, %Zachaeus.Error{code: :empty_plan, message: "The given plan cannot be empty"}}
iex> Zachaeus.License.deserialize("absolutely_invalid_license_string")
{:error, %Zachaeus.Error{code: :invalid_license_format, message: "Unable to deserialize license string due to an invalid format"}}
iex> Zachaeus.License.deserialize("my_user_id_1|default|1542279600|1573815600")
{:ok, %Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-15 11:00:00Z]}}
"""
@spec deserialize(serialized_license :: __MODULE__.serialized()) :: {:ok, __MODULE__.t()} | {:error, Zachaeus.Error.t()}
def deserialize(serialized_license) when is_binary(serialized_license) do
case String.split(serialized_license, @separator_regex, trim: true) do
[identifier_part, plan_part, valid_from_part, valid_until_part] ->
with {:ok, casted_identifier} <- cast_string(identifier_part),
{:ok, casted_plan} <- cast_string(plan_part),
{:ok, casted_valid_from} <- cast_datetime(valid_from_part),
{:ok, casted_valid_until} <- cast_datetime(valid_until_part),
{:ok, identifier} <- validate_identifier(casted_identifier),
{:ok, plan} <- validate_plan(casted_plan),
{:ok, valid_from, valid_until} <- validate_timerange(casted_valid_from, casted_valid_until) do
{:ok,
%__MODULE__{identifier: identifier, plan: plan, valid_from: valid_from, valid_until: valid_until}}
end
_invalid_serialized_license_format ->
{:error, %Error{code: :invalid_license_format, message: "Unable to deserialize license string due to an invalid format"}}
end
end
def deserialize(_invalid_serialized_license), do: {:error, %Error{code: :invalid_license_type, message: "Unable to deserialize license due to an invalid type"}}
@doc """
Validates a license and checks whether it is e.g. predated, expired or generally invalid.
When the license is valid, it returns the remaining license time in seconds.
## Examples
iex> Zachaeus.License.validate(%Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-30 09:50:00Z]})
{:error, %Zachaeus.Error{code: :license_expired, message: "The license has expired"}}
iex> Zachaeus.License.validate(%{})
{:error, %Zachaeus.Error{code: :invalid_license_type, message: "The given license is invalid"}}
Zachaeus.License.validate(%Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2099-11-30 09:50:00Z]})
{:ok, 12872893}
"""
@spec validate(__MODULE__.t()) :: {:ok, Integer.t()} | {:error, Zachaeus.Error.t()}
def validate(%__MODULE__{valid_from: valid_from, valid_until: valid_until}) do
with {:ok, valid_from} <- shift_datetime(valid_from),
{:ok, valid_until} <- shift_datetime(valid_until),
{:ok, validation_datetime} <- shift_datetime(DateTime.utc_now())
do
case DateTime.compare(valid_from, validation_datetime) do
from_timerange when from_timerange in [:eq, :lt] ->
case DateTime.compare(valid_until, validation_datetime) do
until_timerange when until_timerange in [:eq, :gt] ->
{:ok, DateTime.diff(valid_until, validation_datetime)}
_outdated_license ->
{:error, %Error{code: :license_expired, message: "The license has expired"}}
end
_predated_license ->
{:error, %Error{code: :license_predated, message: "The license is not yet valid"}}
end
end
end
def validate(_invalid_license), do: {:error, %Error{code: :invalid_license_type, message: "The given license is invalid"}}
@doc """
Validates a license and checks whether it is e.g. predated, expired or generally invalid and indicates that with a boolean.
## Examples
iex> Zachaeus.License.valid?(%Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2019-11-30 09:50:00Z]})
false
iex> Zachaeus.License.valid?(%{})
false
iex> Zachaeus.License.valid?(%Zachaeus.License{identifier: "my_user_id_1", plan: "default", valid_from: ~U[2018-11-15 11:00:00Z], valid_until: ~U[2099-11-30 09:50:00Z]})
true
"""
@spec valid?(__MODULE__.t()) :: boolean()
def valid?(%__MODULE__{} = license) do
case validate(license) do
{:ok, _remaining_time} -> true
_invalid_license -> false
end
end
def valid?(_invalid_license), do: false
## -- CAST HELPER FUNCTIONS

# Casts binaries, numbers, atoms and nil to a whitespace-trimmed String.
@spec cast_string(data :: String.t() | Integer.t() | Float.t() | Atom.t() | nil) :: {:ok, String.t()} | {:error, Zachaeus.Error.t()}
defp cast_string(data) when is_binary(data), do: {:ok, String.trim(data)}

defp cast_string(data) when is_number(data) or is_atom(data) or is_nil(data) do
  # Convert to a binary first, then reuse the binary clause for trimming.
  cast_string(to_string(data))
end
@spec cast_datetime(timestamp :: DateTime.t() | Integer.t() | Float.t() | String.t()) :: {:ok, DateTime.t()} | {:error, Zachaeus.Error.t()}
# Already a DateTime: only normalize its timezone.
defp cast_datetime(%DateTime{} = datetime), do: shift_datetime(datetime)

# Integer: interpret it as a unix timestamp.
defp cast_datetime(unix) when is_integer(unix) do
  case DateTime.from_unix(unix) do
    {:ok, %DateTime{} = datetime} ->
      shift_datetime(datetime)

    _out_of_range ->
      {:error, %Error{code: :invalid_timestamp_type, message: "Unable to cast timestamp to DateTime"}}
  end
end

# Float: drop the fractional part and treat it as an integer unix timestamp.
defp cast_datetime(unix) when is_float(unix), do: cast_datetime(trunc(unix))

# Binary: parse a leading integer (unix timestamp) out of the string.
defp cast_datetime(raw) when is_binary(raw) do
  case Integer.parse(raw) do
    {unix, _remainder} ->
      cast_datetime(unix)

    :error ->
      {:error, %Error{code: :invalid_timestamp_type, message: "Unable to cast timestamp to DateTime"}}
  end
end

defp cast_datetime(_unsupported), do: {:error, %Error{code: :invalid_timestamp_type, message: "Unable to cast timestamp to DateTime"}}
## -- VALIDATION HELPER FUNCTIONS

# Ensures the identifier is a non-empty String without the reserved separator character.
@spec validate_identifier(identifier :: String.t()) :: {:ok, String.t()} | {:error, Zachaeus.Error.t()}
defp validate_identifier(identifier) when is_binary(identifier) do
  cond do
    String.trim(identifier) == "" ->
      {:error, %Error{code: :empty_identifier, message: "The given identifier cannot be empty"}}

    identifier =~ @separator_regex ->
      {:error, %Error{code: :invalid_identifer, message: "The given identifier contains a reserved character"}}

    true ->
      {:ok, identifier}
  end
end

defp validate_identifier(_other), do: {:error, %Error{code: :invalid_identifer, message: "The given identifier is not a String"}}
# Ensures the plan is a non-empty String without the reserved separator character.
@spec validate_plan(plan :: String.t()) :: {:ok, String.t()} | {:error, Zachaeus.Error.t()}
defp validate_plan(plan) when is_binary(plan) do
  cond do
    String.trim(plan) == "" ->
      {:error, %Error{code: :empty_plan, message: "The given plan cannot be empty"}}

    plan =~ @separator_regex ->
      {:error, %Error{code: :invalid_plan, message: "The given plan contains a reserved character"}}

    true ->
      {:ok, plan}
  end
end

defp validate_plan(_other), do: {:error, %Error{code: :invalid_plan, message: "The given plan is not a String"}}
# Ensures the license period is a valid range, i.e. it begins at or before its end.
# Returns the timezone-shifted beginning and ending DateTime on success.
@spec validate_timerange(DateTime.t(), DateTime.t()) :: {:ok, DateTime.t(), DateTime.t()} | {:error, Zachaeus.Error.t()}
defp validate_timerange(%DateTime{} = valid_from, %DateTime{} = valid_until) do
  with {:ok, valid_from} <- shift_datetime(valid_from), {:ok, valid_until} <- shift_datetime(valid_until) do
    case DateTime.compare(valid_from, valid_until) do
      # BUG FIX: the original guard matched the typo atom `:eg` instead of `:eq`,
      # so a range whose beginning equals its end was wrongly rejected as invalid.
      timerange when timerange in [:eq, :lt] ->
        {:ok, valid_from, valid_until}

      _invalid_timerange ->
        {:error, %Error{code: :invalid_timerange, message: "The given timerange is invalid"}}
    end
  end
end

# Typo fix in the message: "The the given timerange" -> "The given timerange".
defp validate_timerange(_invalid_valid_from, _invalid_valid_until), do: {:error, %Error{code: :invalid_timerange, message: "The given timerange needs a beginning and an ending DateTime"}}
## -- GENERAL HELPER FUNCTIONS

# Normalizes a DateTime into the module's default timezone (@default_timezone).
@spec shift_datetime(timestamp :: DateTime.t()) :: {:ok, DateTime.t()} | {:error, Zachaeus.Error.t()}
defp shift_datetime(%DateTime{} = datetime) do
  DateTime.shift_zone(datetime, @default_timezone)
end

defp shift_datetime(_other), do: {:error, %Error{code: :invalid_timestamp, message: "The timestamp cannot be shifted to UTC timezone"}}
end
|
lib/zachaeus/license.ex
| 0.856122
| 0.520862
|
license.ex
|
starcoder
|
defmodule Contex.SVG.Sanitize do
  @moduledoc false
  # Basically a copy/paste of Plug.HTML. Copied here to avoid a substantial dependency
  # License:
  # Copyright (c) 2013 Plataformatec.
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
  # You may obtain a copy of the License at
  #     http://www.apache.org/licenses/LICENSE-2.0
  # Unless required by applicable law or agreed to in writing, software
  # distributed under the License is distributed on an "AS IS" BASIS,
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.

  @doc """
  Very basic approach to sanitizing strings for titles etc - it is effectively run
  through Plug.HTML.html_escape
  """
  @spec basic_sanitize(any()) :: any()
  def basic_sanitize(data) when is_binary(data), do: html_escape(data)
  # Numbers cannot carry markup, so they pass through unchanged.
  # NOTE: any other type raises FunctionClauseError (no catch-all clause).
  def basic_sanitize(data) when is_number(data), do: data

  @doc """
  Escapes the HTML-significant characters `<`, `>`, `&`, `"` and `'` in `data`,
  returning a binary.
  """
  @spec html_escape(String.t()) :: String.t()
  def html_escape(data) when is_binary(data) do
    IO.iodata_to_binary(to_iodata(data, 0, data, []))
  end

  @doc """
  Same as `html_escape/1` but returns iodata, avoiding the final binary copy.
  """
  @spec html_escape_to_iodata(String.t()) :: iodata
  def html_escape_to_iodata(data) when is_binary(data) do
    to_iodata(data, 0, data, [])
  end

  # BUG FIX: the replacement strings had been corrupted into the literal
  # characters themselves (e.g. `{?<, "<"}`), which made escaping a no-op.
  # Restored the HTML entities used by Plug.HTML.
  escapes = [
    {?<, "&lt;"},
    {?>, "&gt;"},
    {?&, "&amp;"},
    {?", "&quot;"},
    {?', "&#39;"}
  ]

  # Fast path: emit the entity and skip past the escaped character.
  for {match, insert} <- escapes do
    defp to_iodata(<<unquote(match), rest::bits>>, skip, original, acc) do
      to_iodata(rest, skip + 1, original, [acc | unquote(insert)])
    end
  end

  # Plain character: start counting a run of unescaped bytes (see 5-arity clauses).
  defp to_iodata(<<_char, rest::bits>>, skip, original, acc) do
    to_iodata(rest, skip, original, acc, 1)
  end

  defp to_iodata(<<>>, _skip, _original, acc) do
    acc
  end

  # A run of `len` plain bytes ends at an escapable character: copy the run,
  # then append the entity.
  for {match, insert} <- escapes do
    defp to_iodata(<<unquote(match), rest::bits>>, skip, original, acc, len) do
      part = binary_part(original, skip, len)
      to_iodata(rest, skip + len + 1, original, [acc, part | unquote(insert)])
    end
  end

  defp to_iodata(<<_char, rest::bits>>, skip, original, acc, len) do
    to_iodata(rest, skip, original, acc, len + 1)
  end

  # Nothing was escaped at all: return the original binary as-is.
  defp to_iodata(<<>>, 0, original, _acc, _len) do
    original
  end

  defp to_iodata(<<>>, skip, original, acc, len) do
    [acc | binary_part(original, skip, len)]
  end
end
|
lib/chart/svg_sanitize.ex
| 0.592077
| 0.493103
|
svg_sanitize.ex
|
starcoder
|
defmodule Ambry.Series do
  @moduledoc """
  Functions for dealing with Series.
  """

  import Ambry.SearchUtils
  import Ecto.Query

  alias Ambry.Repo
  alias Ambry.Series.{Series, SeriesBook}

  @doc """
  Returns a limited list of series and whether or not there are more.

  By default, it will limit to the first 10 results. Supply `offset` and `limit`
  to change this. Also can optionally filter by the given `filter` string.

  ## Examples

      iex> list_series()
      {[%Series{}, ...], true}

  """
  def list_series(offset \\ 0, limit \\ 10, filter \\ nil) do
    # Fetch one extra row so we can tell whether another page exists.
    base_query =
      Series
      |> order_by(:name)
      |> offset(^offset)
      |> limit(^(limit + 1))

    query =
      if filter do
        where(base_query, [s], ilike(s.name, ^"%#{filter}%"))
      else
        base_query
      end

    fetched = Repo.all(query)
    page = Enum.slice(fetched, 0, limit)

    {page, fetched != page}
  end

  @doc """
  Gets a single series.

  Raises `Ecto.NoResultsError` if the Series does not exist.

  ## Examples

      iex> get_series!(123)
      %Series{}

      iex> get_series!(456)
      ** (Ecto.NoResultsError)

  """
  def get_series!(id) do
    ordered_books = from sb in SeriesBook, order_by: [asc: sb.book_number]

    Series
    |> preload(series_books: ^{ordered_books, [:book]})
    |> Repo.get!(id)
  end

  @doc """
  Creates a series.

  ## Examples

      iex> create_series(%{field: value})
      {:ok, %Series{}}

      iex> create_series(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_series(attrs \\ %{}) do
    Repo.insert(Series.changeset(%Series{}, attrs))
  end

  @doc """
  Updates a series.

  ## Examples

      iex> update_series(series, %{field: new_value})
      {:ok, %Series{}}

      iex> update_series(series, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_series(%Series{} = series, attrs) do
    Repo.update(Series.changeset(series, attrs))
  end

  @doc """
  Deletes a series.

  ## Examples

      iex> delete_series(series)
      {:ok, %Series{}}

      iex> delete_series(series)
      {:error, %Ecto.Changeset{}}

  """
  def delete_series(%Series{} = series) do
    Repo.delete(series)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking series changes.

  ## Examples

      iex> change_series(series)
      %Ecto.Changeset{data: %Series{}}

  """
  def change_series(%Series{} = series, attrs \\ %{}) do
    Series.changeset(series, attrs)
  end

  @doc """
  Gets a series and all of its books.

  Books are listed in ascending order based on series book number.
  """
  def get_series_with_books!(series_id) do
    ordered_books = from sb in SeriesBook, order_by: [asc: sb.book_number]

    Series
    |> preload(series_books: ^{ordered_books, [book: [:authors, series_books: :series]]})
    |> Repo.get!(series_id)
  end

  @doc """
  Finds series that match a query string.

  Returns a list of tuples of the form `{jaro_distance, series}`.
  """
  def search(query_string, limit \\ 15) do
    ordered_books = from sb in SeriesBook, order_by: [asc: sb.book_number]

    Series
    |> where([s], ilike(s.name, ^"%#{query_string}%"))
    |> limit(^limit)
    |> preload(series_books: ^{ordered_books, [book: [:authors, series_books: :series]]})
    |> Repo.all()
    |> sort_by_jaro(query_string, :name)
  end

  @doc """
  Returns all series for use in `Select` components.
  """
  def for_select do
    Series
    |> select([s], {s.name, s.id})
    |> order_by([s], asc: s.name)
    |> Repo.all()
  end
end
|
lib/ambry/series.ex
| 0.886162
| 0.584153
|
series.ex
|
starcoder
|
defmodule ExAequo.Enum do
  @moduledoc """
  ## ExAequo.Enum offers some extension functions for Elixir's Enum module
  ### Grouped Accumulation
  Groupes accumulated values of an Enum according to a function that
  indicates if two consequent items are of the same kind and if so
  how to accumulate their two values.
  The `grouped_reduce` function returns the groupes in reverse order, as,
  during traversal of lists quite often reversing the result of the
  classical "take first and push a function of it to the result" pattern
  cancels out.
  An optional, `reverse: true` keyword option can be provided to reverse
  the final result for convenience.
      iex(0)> add_same = fn {x, a}, {y, b} ->
      ...(0)>   cond do
      ...(0)>     x == y -> {:cont, {x, a + b}}
      ...(0)>     true   -> {:stop, nil} end end
      ...(0)> E.grouped_reduce(
      ...(0)>   [{:a, 1}, {:a, 2}, {:b, 3}, {:b, 4}], add_same)
      [{:b, 7}, {:a, 3}]
  The `grouped_inject` function behaves almost identically to `grouped_reduce`,
  however an initial value is provided.
      iex(1)> sub_same = fn {x, a}, {y, b} ->
      ...(1)>   cond do
      ...(1)>     x == y -> {:cont, {x, a - b}}
      ...(1)>     true   -> {:stop, nil}
      ...(1)>   end
      ...(1)> end
      ...(1)> E.grouped_inject(
      ...(1)>   [{:a, 1}, {:b, 2}, {:b, 2}, {:c, 2}, {:c, 1}, {:c, 1}],
      ...(1)>   {:a, 43}, sub_same, reverse: true)
      [a: 42, b: 0, c: 0]
  """

  alias ExAequo.Error

  # Folds two consecutive elements ({:cont, combined}) or signals that a
  # new group starts ({:stop, _}).
  @type grouped_accumulator(ele_type, result_type) :: (ele_type, ele_type -> {:cont | :stop, result_type})

  @doc false
  @spec grouped_reduce(list(ele_type), grouped_accumulator(ele_type, result_type), Keyword.t())
          :: list(result_type) when ele_type: any(), result_type: any()
  def grouped_reduce(xs, gacc_fn, options \\ [])
  def grouped_reduce([], _, _), do: []

  def grouped_reduce([x | xs], f, options) do
    # The first element seeds the accumulator; groups come back reversed
    # unless `reverse: true` is passed.
    xs
    |> grouped_acc_impl(x, f, [])
    |> maybe_reverse(options)
  end

  @doc false
  @spec grouped_inject(list(ele_type), ele_type, grouped_accumulator(ele_type, result_type), Keyword.t())
          :: list(result_type) when ele_type: any(), result_type: any()
  def grouped_inject(xs, initial, gacc_fn, options \\ [])

  def grouped_inject(xs, initial, f, options) do
    # Like grouped_reduce/3, but the accumulator is seeded with `initial`.
    xs
    |> grouped_acc_impl(initial, f, [])
    |> maybe_reverse(options)
  end

  # Walks the list, folding consecutive "same-kind" elements into `acc` and
  # pushing `acc` onto `result` whenever the accumulator function signals :stop.
  # FIX: the fourth parameter is the accumulated result list — the original
  # @spec incorrectly declared it as Keyword.t().
  @spec grouped_acc_impl(list(ele_type), ele_type, grouped_accumulator(ele_type, result_type), list(result_type))
          :: list(result_type) when ele_type: any(), result_type: any()
  defp grouped_acc_impl(xs, acc, f, result)
  defp grouped_acc_impl([], acc, _, result), do: [acc | result]

  defp grouped_acc_impl([x | xs], acc, f, result) do
    case f.(acc, x) do
      {:cont, combination} -> grouped_acc_impl(xs, combination, f, result)
      {:stop, _} -> grouped_acc_impl(xs, x, f, [acc | result])
      _ -> raise Error, "function must be of type {:cont, any()} | {:stop, any()}"
    end
  end

  # Reverses the accumulated groups when the `reverse: true` option was given.
  @spec maybe_reverse(list(result_type), Keyword.t()) :: list(result_type) when result_type: any()
  defp maybe_reverse(result, options) do
    if options[:reverse], do: Enum.reverse(result), else: result
  end
end
|
lib/ex_aequo/enum.ex
| 0.714528
| 0.668915
|
enum.ex
|
starcoder
|
defmodule Animals do
  @moduledoc """
  A small playground module for working with a list of zoo animals.
  """

  @doc """
  Hello world.
  ## Examples
      iex> Animals.hello
      :world
  """
  def hello, do: :world

  @doc """
  create_zoo returns a list of zoo animals
  ## Examples
      iex> Animals.create_zoo
      ["lion", "tiger", "gorilla", "elephant", "monkey", "giraffe"]
  """
  def create_zoo do
    ["lion", "tiger", "gorilla", "elephant", "monkey", "giraffe"]
  end

  @doc """
  Randomise takes a list of zoo animals and returns a new list with the same
  elements as the first, in shuffled order.
  """
  def randomise(zoo), do: Enum.shuffle(zoo)

  @doc """
  contains? takes a list of zoo animals and a single animal and returns a boolean
  as to whether or not the list contains the given animal.
  ## Examples
      iex> zoo = Animals.create_zoo
      iex> Animals.contains?(zoo, "gorilla")
      true
  """
  def contains?(zoo, animal), do: animal in zoo

  @doc """
  see_animals takes a list of zoo animals and the number of animals that
  you want to see and then returns a list of the last `count` animals.
  ## Examples
      iex> zoo = Animals.create_zoo
      iex> Animals.see_animals(zoo, 2)
      ["monkey", "giraffe"]
  """
  def see_animals(zoo, count) do
    # A negative count takes elements from the end of the list.
    Enum.take(zoo, -count)
  end

  @doc """
  save takes a list of zoo animals and a filename and saves the list to that file.
  """
  def save(zoo, filename) do
    # Serialize the list to Erlang's external term format before writing.
    File.write(filename, :erlang.term_to_binary(zoo))
  end

  @doc """
  load takes a filename and returns a list of animals if the file exists,
  otherwise the string "File does not exist".
  """
  def load(filename) do
    with {:ok, binary} <- File.read(filename) do
      :erlang.binary_to_term(binary)
    else
      {:error, _reason} -> "File does not exist"
    end
  end

  @doc """
  selection takes a number, creates a zoo, randomises it and then returns a list
  of animals of the selected length.
  """
  def selection(number_of_animals) do
    create_zoo()
    |> randomise()
    |> see_animals(number_of_animals)
  end
end
|
elixir/animals/lib/animals.ex
| 0.826362
| 0.53959
|
animals.ex
|
starcoder
|
defmodule ExCubicIngestion.ProcessIncoming do
  @moduledoc """
  ProcessIncoming server.
  Every @wait_interval_ms, scans the Incoming bucket for table prefixes. If a
  prefix is present here and has a record in CubicTable, the prefix is
  scanned for files, which are inserted as CubicLoad records to be processed
  in the future.
  """
  use GenServer

  alias ExCubicIngestion.S3Scan
  alias ExCubicIngestion.Schema.CubicLoad
  alias ExCubicIngestion.Schema.CubicTable

  # Delay between consecutive bucket scans.
  @wait_interval_ms 5_000

  @opaque t :: %__MODULE__{lib_ex_aws: module()}
  defstruct lib_ex_aws: ExAws

  # client methods

  @spec start_link(Keyword.t()) :: GenServer.on_start()
  def start_link(opts), do: GenServer.start_link(__MODULE__, opts)

  @spec status(GenServer.server()) :: :running
  def status(server), do: GenServer.call(server, :status)

  # callbacks

  @impl GenServer
  def init(opts) do
    # Timeout of 0 triggers the first scan immediately via handle_info(:timeout, _).
    {:ok, struct!(__MODULE__, opts), 0}
  end

  @impl GenServer
  def handle_info(:timeout, %{} = state) do
    run(state)

    {:noreply, state, @wait_interval_ms}
  end

  @impl GenServer
  def handle_call(:status, _from, state), do: {:reply, :running, state}

  # server helper functions

  @spec run(t) :: :ok
  def run(state) do
    incoming_bucket = Application.fetch_env!(:ex_cubic_ingestion, :s3_bucket_incoming)
    incoming_prefix = Application.fetch_env!(:ex_cubic_ingestion, :s3_bucket_prefix_incoming)

    state
    |> prefixes_list(incoming_bucket, incoming_prefix)
    |> Enum.filter(&Map.has_key?(&1, :prefix))
    |> Enum.map(fn %{prefix: full_prefix} ->
      String.replace_prefix(full_prefix, incoming_prefix, "")
    end)
    |> CubicTable.filter_to_existing_prefixes()
    |> Enum.each(fn {table_prefix, table} ->
      load_objects_for_table(state, incoming_bucket, incoming_prefix, table_prefix, table)
    end)

    :ok
  end

  # Lists the objects under one table prefix and records them as CubicLoads.
  @spec load_objects_for_table(t, String.t(), String.t(), String.t(), CubicTable.t()) :: term()
  defp load_objects_for_table(state, bucket, prefix, table_prefix, table) do
    bucket
    |> S3Scan.list_objects_v2(
      prefix: "#{prefix}#{table_prefix}",
      lib_ex_aws: state.lib_ex_aws
    )
    |> Enum.filter(&Map.has_key?(&1, :key))
    |> Enum.map(fn object ->
      %{object | key: String.replace_prefix(object[:key], prefix, "")}
    end)
    |> CubicLoad.insert_new_from_objects_with_table(table)
  end

  @doc """
  Gets a list of S3 prefixes for each vendor to be used in determining table prefixes.
  """
  @spec prefixes_list(t, String.t(), String.t()) :: Enumerable.t()
  def prefixes_list(state, incoming_bucket, incoming_prefix) do
    # ODS Qlik first, then DMAP, preserving the original scan order.
    Enum.flat_map(["cubic/ods_qlik/", "cubic/dmap/"], fn vendor_prefix ->
      S3Scan.list_objects_v2(
        incoming_bucket,
        prefix: "#{incoming_prefix}#{vendor_prefix}",
        delimiter: "/",
        lib_ex_aws: state.lib_ex_aws
      )
    end)
  end
end
|
ex_cubic_ingestion/lib/ex_cubic_ingestion/process_incoming.ex
| 0.748076
| 0.424531
|
process_incoming.ex
|
starcoder
|
defmodule Tracex.Trace do
  @moduledoc """
  Collection of helpers to extract data from compiler traces
  Compiler traces encapsulate events happening in certain environment.
  The module originating an event is considered an *outbound* module,
  the module on the receiving end is considered an *inbound* module.
  For example, consider a trace emitted when module `A` imports a function
  from module `B`. `A` is denoted as outbound and `B` as inbound.
  Traces of local function or marco calls naturally have the same module as
  both inbound and outbound.
  Note that inbound/outbound notions translate directly to direction of
  the edge between the two modules involved in project's module dependency graph.
  """

  @type t :: {event, env}
  @type event :: tuple
  @type env :: %{
          aliases: Macro.Env.aliases(),
          context: Macro.Env.context(),
          context_modules: Macro.Env.context_modules(),
          file: binary,
          function: Macro.Env.name_arity() | nil,
          line: Macro.Env.line(),
          module: atom
        }

  @doc """
  Returns trace's inbound module
  """
  @spec inbound_module(t) :: atom
  def inbound_module({{:import, _, module, _}, _env}), do: module
  def inbound_module({{:imported_function, _, module, _, _}, _env}), do: module
  def inbound_module({{:imported_macro, _, module, _, _}, _env}), do: module
  def inbound_module({{:alias, _, module, _, _}, _env}), do: module
  def inbound_module({{:alias_expansion, _, _, module}, _env}), do: module
  def inbound_module({{:alias_reference, _, module}, _env}), do: module
  def inbound_module({{:require, _, module, _}, _env}), do: module
  def inbound_module({{:struct_expansion, _, module, _}, _env}), do: module
  def inbound_module({{:remote_function, _, module, _, _}, _env}), do: module
  def inbound_module({{:remote_macro, _, module, _, _}, _env}), do: module
  # Local calls stay inside the module that emitted the event.
  def inbound_module({{:local_function, _, _, _}, env}), do: env.module
  def inbound_module({{:local_macro, _, _, _}, env}), do: env.module

  def inbound_module({event, _env}) do
    raise "cannot extract module from event: #{inspect(event)}"
  end

  @doc """
  Returns trace's outbound module
  """
  @spec outbound_module(t) :: atom
  def outbound_module({_, env}), do: env.module

  @doc """
  Returns true if trace describes a remote function or macro call
  """
  @spec remote_call?(t) :: boolean
  def remote_call?({event, _env}) when elem(event, 0) in [:remote_function, :remote_macro], do: true
  def remote_call?({_event, _env}), do: false

  @doc """
  Returns function name and arity formatted as `function/arity` for remote call traces
  """
  @spec event_func_and_arity(t) :: binary
  def event_func_and_arity({{kind, _, _, name, arity}, _env})
      when kind in [:remote_function, :remote_macro] do
    "#{name}/#{arity}"
  end

  def event_func_and_arity({event, _env}) do
    "cannot extract func and arity from event #{inspect(event)}"
  end

  @doc """
  Returns location in code where a trace originates formatted as `path:line`
  Elixir compiler does not always provide a precise line number of the code in question,
  but rather the line of where its execution environment is defined.
  For example if an event originates in function's body, a line in which the function
  is defined is returned.
  """
  @spec event_location(t) :: binary
  def event_location({event, env}) do
    # Prefer the line from the event's metadata, falling back to the env's line.
    line =
      event
      |> elem(1)
      |> Keyword.get(:line, env.line)

    "#{env.file}:#{line}"
  end

  @doc """
  Returns true if trace describes a module definition
  """
  @spec module_definition?(t) :: boolean
  def module_definition?({{:defmodule, _}, _env}), do: true
  def module_definition?({_event, _env}), do: false

  @doc """
  Returns true if trace describes using given module via `use GivenModule`
  """
  @spec macro_usage?(t, atom) :: boolean
  # Repeating `module` in the pattern requires both occurrences to be equal.
  def macro_usage?({{:remote_macro, _, module, :__using__, 1}, _env}, module), do: true
  def macro_usage?({_event, _env}, _module), do: false

  @doc """
  Returns true if given `module` is on the receiving end of the traced event
  """
  @spec inbound?(t, atom) :: boolean
  def inbound?({{:local_function, _, _, _}, _env}, _module), do: false
  def inbound?({{:local_macro, _, _, _}, _env}, _module), do: false
  def inbound?(trace, module), do: inbound_module(trace) == module

  @doc """
  Returns true if given `module` is originating the traced event
  """
  @spec outbound?(t, atom) :: boolean
  def outbound?({{:local_function, _, _, _}, _env}, _module), do: false
  def outbound?({{:local_macro, _, _, _}, _env}, _module), do: false
  def outbound?(trace, module), do: outbound_module(trace) == module
end
|
lib/tracex/trace.ex
| 0.894778
| 0.523664
|
trace.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.