defmodule Quantum.Normalizer do
@moduledoc """
Normalize Config values into a `Quantum.Job`.
"""
alias Quantum.Job
alias Crontab.CronExpression.Parser, as: CronExpressionParser
alias Crontab.CronExpression
alias Quantum.RunStrategy.NodeList
@type config_short_notation :: {config_schedule, config_task}
@type config_full_notation :: {config_name | nil, Keyword.t() | map}
@type config_schedule ::
CronExpression.t() | String.t() | {:cron, String.t()} | {:extended, String.t()}
@type config_task :: {module, fun, [any]} | (() -> any)
@type config_name :: String.t() | atom
@doc """
Normalize Config Input into `Quantum.Job`.
### Parameters:
* `base` - Empty `Quantum.Job`
* `job` - The Job To Normalize
"""
@spec normalize(Job.t(), config_full_notation | config_short_notation) :: Job.t() | no_return
def normalize(base, job)
def normalize(%Job{} = base, job) when is_list(job) do
normalize_options(base, job |> Enum.into(%{}))
end
def normalize(%Job{} = base, {job_name, opts}) when is_list(opts) do
normalize(base, {job_name, opts |> Enum.into(%{})})
end
def normalize(%Job{} = base, {nil, opts}) when is_map(opts) do
normalize_options(base, opts)
end
def normalize(%Job{} = base, {job_name, opts}) when is_map(opts) do
opts = Map.put(opts, :name, job_name)
normalize_options(base, opts)
end
def normalize(%Job{} = base, {schedule, task}) do
normalize_options(base, %{schedule: schedule, task: task})
end
@spec normalize_options(Job.t(), map) :: Job.t()
defp normalize_options(job, options) do
Enum.reduce(options, job, &normalize_job_option/2)
end
@spec normalize_job_option({atom, any}, Job.t()) :: Job.t()
defp normalize_job_option({:name, name}, job) do
Job.set_name(job, normalize_name(name))
end
defp normalize_job_option({:schedule, schedule}, job) do
Job.set_schedule(job, normalize_schedule(schedule))
end
defp normalize_job_option({:task, task}, job) do
Job.set_task(job, normalize_task(task))
end
defp normalize_job_option({:run_strategy, run_strategy}, job) do
Job.set_run_strategy(job, normalize_run_strategy(run_strategy))
end
defp normalize_job_option({:overlap, overlap}, job) do
Job.set_overlap(job, overlap)
end
defp normalize_job_option({:timezone, timezone}, job) do
Job.set_timezone(job, normalize_timezone(timezone))
end
defp normalize_job_option(_, job), do: job
@spec normalize_task(config_task) :: Job.task() | no_return
defp normalize_task({mod, fun, args}), do: {mod, fun, args}
defp normalize_task(fun) when is_function(fun, 0), do: fun
defp normalize_task(fun) when is_function(fun),
do: raise("Only 0 arity functions are supported via the short syntax.")
@doc false
@spec normalize_schedule(config_schedule) :: Job.schedule() | no_return
def normalize_schedule(nil), do: nil
def normalize_schedule(%CronExpression{} = e), do: e
def normalize_schedule(e) when is_binary(e),
do: e |> String.downcase() |> CronExpressionParser.parse!()
def normalize_schedule({:cron, e}) when is_binary(e),
do: e |> String.downcase() |> CronExpressionParser.parse!()
def normalize_schedule({:extended, e}) when is_binary(e),
do: e |> String.downcase() |> CronExpressionParser.parse!(true)
@spec normalize_name(atom | String.t()) :: atom
defp normalize_name(name) when is_binary(name), do: String.to_atom(name)
defp normalize_name(name) when is_atom(name), do: name
@spec normalize_run_strategy({module, any} | module) :: NodeList
defp normalize_run_strategy(strategy) when is_atom(strategy) do
strategy.normalize_config!(nil)
end
defp normalize_run_strategy({strategy, options}) when is_atom(strategy) do
strategy.normalize_config!(options)
end
@spec normalize_timezone(String.t() | :utc | :local) :: String.t() | :utc | :local
defp normalize_timezone(timezone) when is_binary(timezone), do: timezone
defp normalize_timezone(:utc), do: :utc
defp normalize_timezone(:local), do: :local
defp normalize_timezone(timezone), do: raise("Invalid timezone: #{inspect(timezone)}")
end
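# Usage sketch (not part of the original file). It assumes `base` is the empty
# `%Quantum.Job{}` that a Quantum scheduler hands to the normalizer:
#
#     # short notation: {schedule, task}
#     job = Quantum.Normalizer.normalize(base, {"*/5 * * * *", {IO, :puts, ["tick"]}})
#
#     # full notation: {name, opts}
#     job =
#       Quantum.Normalizer.normalize(base, {:cleanup, [schedule: "@daily", task: fn -> :ok end]})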
# Source file: lib/quantum/normalizer.ex
defmodule ESpec.Assertions.RaiseException do
@moduledoc """
Defines 'raise_exception' assertion.
it do: expect(function).to raise_exception
it do: expect(function).to raise_exception(ErrorModule)
it do: expect(function).to raise_exception(ErrorModule, "message")
"""
use ESpec.Assertions.Interface
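  # Each `match/2` clause invokes the subject and returns `{matched?, extra}`,
  # where `extra` carries what was actually raised so the message builders
  # below can report it.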
defp match(subject, []) do
try do
subject.()
{false, false}
rescue
_error -> {true, false}
end
end
defp match(subject, [module]) do
try do
subject.()
{false, {false, nil}}
rescue
error ->
if error.__struct__ == module do
{true, error.__struct__}
else
{false, error.__struct__}
end
end
end
defp match(subject, [module, mes]) do
try do
subject.()
{false, false}
rescue
error ->
if error.__struct__ == module && Exception.message(error) == mes do
{true, [error.__struct__, Exception.message(error)]}
else
{false, [error.__struct__, Exception.message(error)]}
end
end
end
defp success_message(subject, [], _result, positive) do
to = if positive, do: "raises", else: "doesn't raise"
"#{inspect(subject)} #{to} an exception."
end
defp success_message(subject, [module], _result, positive) do
to = if positive, do: "raises", else: "doesn't raise"
"#{inspect(subject)} #{to} the `#{module}` exception."
end
defp success_message(subject, [module, message], _result, positive) do
to = if positive, do: "raises", else: "doesn't raise"
"#{inspect(subject)} #{to} the `#{module}` exception with the message `#{message}`."
end
defp error_message(subject, [], false, positive) do
if positive do
"Expected #{inspect(subject)} to raise an exception, but nothing was raised."
else
"Expected #{inspect(subject)} not to raise an exception, but an exception was raised."
end
end
defp error_message(subject, [module], err_module, positive) do
if positive do
case err_module do
{false, nil} ->
"Expected #{inspect(subject)} to raise the `#{module}` exception, but nothing was raised."
err_module ->
"Expected #{inspect(subject)} to raise the `#{module}` exception, but `#{err_module}` was raised instead."
end
else
"Expected #{inspect(subject)} not to raise the `#{module}` exception, but the `#{err_module}` exception was raised."
end
end
defp error_message(subject, [module, message], false, positive) do
  to = if positive, do: "to", else: "not to"

  "Expected #{inspect(subject)} #{to} raise the `#{module}` exception with the message `#{message}`, but nothing was raised."
end
defp error_message(subject, [module, message], [err_module, err_message], positive) do
  to = if positive, do: "to", else: "not to"

  "Expected #{inspect(subject)} #{to} raise the `#{module}` exception with the message `#{message}`, but the `#{err_module}` exception was raised with the message `#{err_message}`."
end
end
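# Usage sketch (hypothetical spec module, mirroring the moduledoc examples):
#
#     defmodule RaiseExceptionSpec do
#       use ESpec
#
#       it do: expect(fn -> raise ArgumentError, "boom" end).to raise_exception(ArgumentError, "boom")
#       it do: expect(fn -> :ok end).to_not raise_exception
#     end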
# Source file: lib/espec/assertions/raise_exception.ex
defmodule AWS.StorageGateway do
@moduledoc """
Storage Gateway Service
Storage Gateway is the service that connects an on-premises software appliance
with cloud-based storage to provide seamless and secure integration between an
organization's on-premises IT environment and the Amazon Web Services storage
infrastructure.
The service enables you to securely upload data to the Cloud for cost effective
backup and rapid disaster recovery.
Use the following links to get started using the *Storage Gateway Service API
Reference*:
* [Storage Gateway required request headers](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#AWSStorageGatewayHTTPRequestsHeaders):
Describes the required headers that you must send with every POST request to
Storage Gateway.
* [Signing requests](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#AWSStorageGatewaySigningRequests):
Storage Gateway requires that you authenticate every request you send; this
topic describes how to sign such a request.
* [Error responses](https://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html#APIErrorResponses):
Provides reference information about Storage Gateway errors.
* [Operations in Storage Gateway](https://docs.aws.amazon.com/storagegateway/latest/APIReference/API_Operations.html):
Contains detailed descriptions of all Storage Gateway operations, their request
parameters, response elements, possible errors, and examples of requests and
responses.
* [Storage Gateway endpoints and quotas](https://docs.aws.amazon.com/general/latest/gr/sg.html): Provides a list
of each Region and the endpoints available for use with Storage Gateway.
Storage Gateway resource IDs are in uppercase. When you use these resource IDs
with the Amazon EC2 API, EC2 expects resource IDs in lowercase. You must change
your resource ID to lowercase to use it with the EC2 API. For example, in
Storage Gateway the ID for a volume might be `vol-AA22BB012345DAF670`. When you
use this ID with the EC2 API, you must change it to `vol-aa22bb012345daf670`.
Otherwise, the EC2 API might not behave as expected.
IDs for Storage Gateway volumes and Amazon EBS snapshots created from gateway
volumes are changing to a longer format. Starting in December 2016, all new
volumes and snapshots will be created with a 17-character string. Starting in
April 2016, you will be able to use these longer IDs so you can test your
systems with the new format. For more information, see [Longer EC2 and EBS resource IDs](http://aws.amazon.com/ec2/faqs/#longer-ids).
For example, a volume Amazon Resource Name (ARN) with the longer volume ID
format looks like the following:
`arn:aws:storagegateway:us-west-2:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABBCCDDEEFFG`.
A snapshot ID with the longer ID format looks like the following:
`snap-78e226633445566ee`.
For more information, see [Announcement: Heads-up – Longer Storage Gateway volume and snapshot IDs coming in
2016](http://forums.aws.amazon.com/ann.jspa?annID=3557).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2013-06-30",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "storagegateway",
global?: false,
protocol: "json",
service_id: "Storage Gateway",
signature_version: "v4",
signing_name: "storagegateway",
target_prefix: "StorageGateway_20130630"
}
end
@doc """
Activates the gateway you previously deployed on your host.
In the activation process, you specify information such as the Region that you
want to use for storing snapshots or tapes, the time zone for scheduled
snapshots, the gateway snapshot schedule window, an activation key, and a name
for your gateway. The activation process also associates your gateway with your
account. For more information, see `UpdateGatewayInformation`.
You must turn on the gateway VM before you can activate your gateway.
"""
def activate_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ActivateGateway", input, options)
end
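  # Usage sketch (not in the original source). Parameter names follow the
  # public ActivateGateway API; the client constructor is assumed from the
  # aws-elixir library:
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #
  #     {:ok, %{"GatewayARN" => gateway_arn}, _http_response} =
  #       AWS.StorageGateway.activate_gateway(client, %{
  #         "ActivationKey" => "29AV1-3OFV9-VVIUB-NKT0I-LRO6V",
  #         "GatewayName" => "my-gateway",
  #         "GatewayRegion" => "us-east-1",
  #         "GatewayTimezone" => "GMT-8:00"
  #       })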
@doc """
Configures one or more gateway local disks as cache for a gateway.
This operation is only supported in the cached volume, tape, and file gateway
types (see [How Storage Gateway works (architecture)](https://docs.aws.amazon.com/storagegateway/latest/userguide/StorageGatewayConcepts.html)).
In the request, you specify the gateway Amazon Resource Name (ARN) to which you
want to add cache, and one or more disk IDs that you want to configure as cache.
"""
def add_cache(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddCache", input, options)
end
@doc """
Adds one or more tags to the specified resource.
You use tags to add metadata to resources, which you can use to categorize these
resources. For example, you can categorize resources by purpose, owner,
environment, or team. Each tag consists of a key and a value, which you define.
You can add tags to the following Storage Gateway resources:
* Storage gateways of all types
* Storage volumes
* Virtual tapes
* NFS and SMB file shares
* File System associations
You can create a maximum of 50 tags for each resource. Virtual tapes and storage
volumes that are recovered to a new gateway maintain their tags.
"""
def add_tags_to_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddTagsToResource", input, options)
end
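  # Usage sketch (hypothetical ARN and tag values; `Tags` is a list of
  # key/value pairs as described above):
  #
  #     AWS.StorageGateway.add_tags_to_resource(client, %{
  #       "ResourceARN" => "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B",
  #       "Tags" => [%{"Key" => "environment", "Value" => "production"}]
  #     })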
@doc """
Configures one or more gateway local disks as upload buffer for a specified
gateway.
This operation is supported for the stored volume, cached volume, and tape
gateway types.
In the request, you specify the gateway Amazon Resource Name (ARN) to which you
want to add upload buffer, and one or more disk IDs that you want to configure
as upload buffer.
"""
def add_upload_buffer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddUploadBuffer", input, options)
end
@doc """
Configures one or more gateway local disks as working storage for a gateway.
This operation is only supported in the stored volume gateway type. This
operation is deprecated in cached volume API version 20120630. Use
`AddUploadBuffer` instead.
Working storage is also referred to as upload buffer. You can also use the
`AddUploadBuffer` operation to add upload buffer to a stored volume gateway.
In the request, you specify the gateway Amazon Resource Name (ARN) to which you
want to add working storage, and one or more disk IDs that you want to configure
as working storage.
"""
def add_working_storage(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddWorkingStorage", input, options)
end
@doc """
Assigns a tape to a tape pool for archiving.
The tape assigned to a pool is archived in the S3 storage class that is
associated with the pool. When you use your backup application to eject the
tape, the tape is archived directly into the S3 storage class (S3 Glacier or S3
Glacier Deep Archive) that corresponds to the pool.
Valid Values: `GLACIER` | `DEEP_ARCHIVE`
"""
def assign_tape_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssignTapePool", input, options)
end
@doc """
Associate an Amazon FSx file system with the FSx File Gateway.
After the association process is complete, the file shares on the Amazon FSx
file system are available for access through the gateway. This operation only
supports the FSx File Gateway type.
"""
def associate_file_system(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AssociateFileSystem", input, options)
end
@doc """
Connects a volume to an iSCSI connection and then attaches the volume to the
specified gateway.
Detaching and attaching a volume enables you to recover your data from one
gateway to a different gateway without creating a snapshot. It also makes it
easier to move your volumes from an on-premises gateway to a gateway hosted on
an Amazon EC2 instance.
"""
def attach_volume(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AttachVolume", input, options)
end
@doc """
Cancels archiving of a virtual tape to the virtual tape shelf (VTS) after the
archiving process is initiated.
This operation is only supported in the tape gateway type.
"""
def cancel_archival(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelArchival", input, options)
end
@doc """
Cancels retrieval of a virtual tape from the virtual tape shelf (VTS) to a
gateway after the retrieval process is initiated.
The virtual tape is returned to the VTS. This operation is only supported in the
tape gateway type.
"""
def cancel_retrieval(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelRetrieval", input, options)
end
@doc """
Creates a cached volume on a specified cached volume gateway.
This operation is only supported in the cached volume gateway type.
Cache storage must be allocated to the gateway before you can create a cached
volume. Use the `AddCache` operation to add cache storage to a gateway.
In the request, you must specify the gateway, size of the volume in bytes, the
iSCSI target name, an IP address on which to expose the target, and a unique
client token. In response, the gateway creates the volume and returns
information about it. This information includes the volume Amazon Resource Name
(ARN), its size, and the iSCSI target ARN that initiators can use to connect to
the volume target.
Optionally, you can provide the ARN for an existing volume as the
`SourceVolumeARN` for this cached volume, which creates an exact copy of the
existing volume’s latest recovery point. The `VolumeSizeInBytes` value must be
equal to or larger than the size of the copied volume, in bytes.
"""
def create_cached_iscsi_volume(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateCachediSCSIVolume", input, options)
end
@doc """
Creates a Network File System (NFS) file share on an existing S3 File Gateway.
In Storage Gateway, a file share is a file system mount point backed by Amazon
S3 cloud storage. Storage Gateway exposes file shares using an NFS interface.
This operation is only supported for S3 File Gateways.
S3 File Gateways require Security Token Service (STS) to be activated to enable
you to create a file share. Make sure STS is activated in the Region you are
creating your S3 File Gateway in. If STS is not activated in the Region,
activate it. For information about how to activate STS, see [Activating and deactivating STS in a
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *Identity and Access Management User Guide*.
S3 File Gateways do not support creating hard or symbolic links on a file share.
"""
def create_nfs_file_share(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateNFSFileShare", input, options)
end
@doc """
Creates a Server Message Block (SMB) file share on an existing S3 File Gateway.
In Storage Gateway, a file share is a file system mount point backed by Amazon
S3 cloud storage. Storage Gateway exposes file shares using an SMB interface.
This operation is only supported for S3 File Gateways.
S3 File Gateways require Security Token Service (STS) to be activated to enable
you to create a file share. Make sure that STS is activated in the Region you
are creating your S3 File Gateway in. If STS is not activated in this Region,
activate it. For information about how to activate STS, see [Activating and deactivating STS in a
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *Identity and Access Management User Guide*.
File gateways don't support creating hard or symbolic links on a file share.
"""
def create_smb_file_share(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSMBFileShare", input, options)
end
@doc """
Initiates a snapshot of a volume.
Storage Gateway provides the ability to back up point-in-time snapshots of your
data to Amazon Simple Storage Service (Amazon S3) for durable off-site recovery, and
also import the data to an Amazon Elastic Block Store (EBS) volume in Amazon
Elastic Compute Cloud (EC2). You can take snapshots of your gateway volume on a
scheduled or ad hoc basis. This API enables you to take an ad hoc snapshot. For
more information, see [Editing a snapshot schedule](https://docs.aws.amazon.com/storagegateway/latest/userguide/managing-volumes.html#SchedulingSnapshot).
In the `CreateSnapshot` request, you identify the volume by providing its Amazon
Resource Name (ARN). You must also provide a description for the snapshot. When
Storage Gateway takes the snapshot of the specified volume, the snapshot and
description appear in the Storage Gateway console. In response, Storage Gateway
returns you a snapshot ID. You can use this snapshot ID to check the snapshot
progress or later use it when you want to create a volume from a snapshot. This
operation is only supported in stored and cached volume gateway types.
To list or delete a snapshot, you must use the Amazon EC2 API. For more
information, see
[DescribeSnapshots](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSnapshots.html) or
[DeleteSnapshot](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DeleteSnapshot.html)
in the *Amazon Elastic Compute Cloud API Reference*.
Volume and snapshot IDs are changing to a longer length ID format. For more
information, see the important note on the
[Welcome](https://docs.aws.amazon.com/storagegateway/latest/APIReference/Welcome.html)
page.
"""
def create_snapshot(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSnapshot", input, options)
end
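  # Usage sketch (hypothetical volume ARN; `VolumeARN` and
  # `SnapshotDescription` are the fields the docs above call for):
  #
  #     {:ok, %{"SnapshotId" => snapshot_id}, _http_response} =
  #       AWS.StorageGateway.create_snapshot(client, %{
  #         "VolumeARN" =>
  #           "arn:aws:storagegateway:us-east-1:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABB",
  #         "SnapshotDescription" => "ad hoc snapshot before maintenance"
  #       })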
@doc """
Initiates a snapshot of a gateway from a volume recovery point.
This operation is only supported in the cached volume gateway type.
A volume recovery point is a point in time at which all data of the volume is
consistent and from which you can create a snapshot. To get a list of volume
recovery points for a cached volume gateway, use `ListVolumeRecoveryPoints`.
In the `CreateSnapshotFromVolumeRecoveryPoint` request, you identify the volume
by providing its Amazon Resource Name (ARN). You must also provide a description
for the snapshot. When the gateway takes a snapshot of the specified volume, the
snapshot and its description appear in the Storage Gateway console. In response,
the gateway returns you a snapshot ID. You can use this snapshot ID to check the
snapshot progress or later use it when you want to create a volume from a
snapshot.
To list or delete a snapshot, you must use the Amazon EC2 API. For more
information, see
[DescribeSnapshots](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSnapshots.html) or
[DeleteSnapshot](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DeleteSnapshot.html)
in the *Amazon Elastic Compute Cloud API Reference*.
"""
def create_snapshot_from_volume_recovery_point(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"CreateSnapshotFromVolumeRecoveryPoint",
input,
options
)
end
@doc """
Creates a volume on a specified gateway.
This operation is only supported in the stored volume gateway type.
The size of the volume to create is inferred from the disk size. You can choose
to preserve existing data on the disk, create volume from an existing snapshot,
or create an empty volume. If you choose to create an empty gateway volume, then
any existing data on the disk is erased.
In the request, you must specify the gateway and the disk information on which
you are creating the volume. In response, the gateway creates the volume and
returns volume information such as the volume Amazon Resource Name (ARN), its
size, and the iSCSI target ARN that initiators can use to connect to the volume
target.
"""
def create_stored_iscsi_volume(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateStorediSCSIVolume", input, options)
end
@doc """
Creates a new custom tape pool.
You can use custom tape pool to enable tape retention lock on tapes that are
archived in the custom pool.
"""
def create_tape_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTapePool", input, options)
end
@doc """
Creates a virtual tape by using your own barcode.
You write data to the virtual tape and then archive the tape. A barcode is
unique and cannot be reused if it has already been used on a tape. This applies
to barcodes used on deleted tapes. This operation is only supported in the tape
gateway type.
Cache storage must be allocated to the gateway before you can create a virtual
tape. Use the `AddCache` operation to add cache storage to a gateway.
"""
def create_tape_with_barcode(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTapeWithBarcode", input, options)
end
@doc """
Creates one or more virtual tapes.
You write data to the virtual tapes and then archive the tapes. This operation
is only supported in the tape gateway type.
Cache storage must be allocated to the gateway before you can create virtual
tapes. Use the `AddCache` operation to add cache storage to a gateway.
"""
def create_tapes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTapes", input, options)
end
@doc """
Deletes the automatic tape creation policy of a gateway.
If you delete this policy, new virtual tapes must be created manually. Use the
Amazon Resource Name (ARN) of the gateway in your request to remove the policy.
"""
def delete_automatic_tape_creation_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAutomaticTapeCreationPolicy", input, options)
end
@doc """
Deletes the bandwidth rate limits of a gateway.
You can delete either the upload and download bandwidth rate limit, or you can
delete both. If you delete only one of the limits, the other limit remains
unchanged. To specify which gateway to work with, use the Amazon Resource Name
(ARN) of the gateway in your request. This operation is supported for the stored
volume, cached volume, and tape gateway types.
"""
def delete_bandwidth_rate_limit(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteBandwidthRateLimit", input, options)
end
@doc """
Deletes Challenge-Handshake Authentication Protocol (CHAP) credentials for a
specified iSCSI target and initiator pair.
This operation is supported in volume and tape gateway types.
"""
def delete_chap_credentials(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteChapCredentials", input, options)
end
@doc """
Deletes a file share from an S3 File Gateway.
This operation is only supported for S3 File Gateways.
"""
def delete_file_share(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteFileShare", input, options)
end
@doc """
Deletes a gateway.
To specify which gateway to delete, use the Amazon Resource Name (ARN) of the
gateway in your request. The operation deletes the gateway; however, it does not
delete the gateway virtual machine (VM) from your host computer.
After you delete a gateway, you cannot reactivate it. Completed snapshots of the
gateway volumes are not deleted upon deleting the gateway, however, pending
snapshots will not complete. After you delete a gateway, your next step is to
remove it from your environment.
You no longer pay software charges after the gateway is deleted; however, your
existing Amazon EBS snapshots persist and you will continue to be billed for
these snapshots. You can choose to remove all remaining Amazon EBS snapshots by
canceling your Amazon EC2 subscription. If you prefer not to cancel your Amazon
EC2 subscription, you can delete your snapshots using the Amazon EC2 console.
For more information, see the [Storage Gateway detail page](http://aws.amazon.com/storagegateway).
"""
def delete_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteGateway", input, options)
end
@doc """
Deletes a snapshot of a volume.
You can take snapshots of your gateway volumes on a scheduled or ad hoc basis.
This API action enables you to delete a snapshot schedule for a volume. For more
information, see [Backing up your volumes](https://docs.aws.amazon.com/storagegateway/latest/userguide/backing-up-volumes.html).
In the `DeleteSnapshotSchedule` request, you identify the volume by providing
its Amazon Resource Name (ARN). This operation is only supported in stored and
cached volume gateway types.
To list or delete a snapshot, you must use the Amazon EC2 API. For more
information, go to
[DescribeSnapshots](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSnapshots.html)
in the *Amazon Elastic Compute Cloud API Reference*.
"""
def delete_snapshot_schedule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSnapshotSchedule", input, options)
end
@doc """
Deletes the specified virtual tape.
This operation is only supported in the tape gateway type.
"""
def delete_tape(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTape", input, options)
end
@doc """
Deletes the specified virtual tape from the virtual tape shelf (VTS).
This operation is only supported in the tape gateway type.
"""
def delete_tape_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTapeArchive", input, options)
end
@doc """
Deletes a custom tape pool.
A custom tape pool can only be deleted if there are no tapes in the pool and if
there are no automatic tape creation policies that reference the custom tape
pool.
"""
def delete_tape_pool(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTapePool", input, options)
end
@doc """
Deletes the specified storage volume that you previously created using the
`CreateCachediSCSIVolume` or `CreateStorediSCSIVolume` API.
This operation is only supported in the cached volume and stored volume types.
For stored volume gateways, the local disk that was configured as the storage
volume is not deleted. You can reuse the local disk to create another storage
volume.
Before you delete a volume, make sure there are no iSCSI connections to the
volume you are deleting. You should also make sure there is no snapshot in
progress. You can use the Amazon Elastic Compute Cloud (Amazon EC2) API to query
snapshots on the volume you are deleting and check the snapshot status. For more
information, go to
[DescribeSnapshots](https://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSnapshots.html)
in the *Amazon Elastic Compute Cloud API Reference*.
In the request, you must provide the Amazon Resource Name (ARN) of the storage
volume you want to delete.
"""
def delete_volume(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteVolume", input, options)
end
@doc """
Returns information about the most recent high availability monitoring test that
was performed on the host in a cluster.
If a test isn't performed, the status and start time in the response would be
null.
"""
def describe_availability_monitor_test(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAvailabilityMonitorTest", input, options)
end
@doc """
Returns the bandwidth rate limits of a gateway.
By default, these limits are not set, which means no bandwidth rate limiting is
in effect. This operation is supported for the stored volume, cached volume, and
tape gateway types.
This operation returns a value for a bandwidth rate limit only if the limit is
set. If no limits are set for the gateway, then this operation returns only
the gateway ARN in the response body. To specify which gateway to describe, use
the Amazon Resource Name (ARN) of the gateway in your request.
"""
def describe_bandwidth_rate_limit(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBandwidthRateLimit", input, options)
end
@doc """
Returns information about the bandwidth rate limit schedule of a gateway.
By default, gateways do not have bandwidth rate limit schedules, which means no
bandwidth rate limiting is in effect. This operation is supported only in the
volume and tape gateway types.
This operation returns information about a gateway's bandwidth rate limit
schedule. A bandwidth rate limit schedule consists of one or more bandwidth rate
limit intervals. A bandwidth rate limit interval defines a period of time on one
or more days of the week, during which bandwidth rate limits are specified for
uploading, downloading, or both.
A bandwidth rate limit interval consists of one or more days of the week, a
start hour and minute, an ending hour and minute, and bandwidth rate limits for
uploading and downloading.
If no bandwidth rate limit schedule intervals are set for the gateway, this
operation returns an empty response. To specify which gateway to describe, use
the Amazon Resource Name (ARN) of the gateway in your request.
"""
def describe_bandwidth_rate_limit_schedule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeBandwidthRateLimitSchedule", input, options)
end
@doc """
Returns information about the cache of a gateway.
This operation is only supported in the cached volume, tape, and file gateway
types.
The response includes disk IDs that are configured as cache, and it includes the
amount of cache allocated and used.
"""
def describe_cache(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCache", input, options)
end
@doc """
Returns a description of the gateway volumes specified in the request.
This operation is only supported in the cached volume gateway type.
The list of gateway volumes in the request must be from one gateway. In the
response, Storage Gateway returns volume information sorted by volume Amazon
Resource Name (ARN).
"""
def describe_cached_iscsi_volumes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeCachediSCSIVolumes", input, options)
end
@doc """
Returns an array of Challenge-Handshake Authentication Protocol (CHAP)
credentials information for a specified iSCSI target, one for each
target-initiator pair.
This operation is supported in the volume and tape gateway types.
"""
def describe_chap_credentials(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeChapCredentials", input, options)
end
@doc """
Gets the file system association information.
This operation is only supported for FSx File Gateways.
"""
def describe_file_system_associations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeFileSystemAssociations", input, options)
end
@doc """
Returns metadata about a gateway such as its name, network interfaces,
configured time zone, and the state (whether the gateway is running or not).
To specify which gateway to describe, use the Amazon Resource Name (ARN) of the
gateway in your request.
"""
def describe_gateway_information(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeGatewayInformation", input, options)
end
@doc """
Returns your gateway's weekly maintenance start time including the day and time
of the week.
Note that values are in terms of the gateway's time zone.
"""
def describe_maintenance_start_time(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeMaintenanceStartTime", input, options)
end
@doc """
Gets a description for one or more Network File System (NFS) file shares from an
S3 File Gateway.
This operation is only supported for S3 File Gateways.
"""
def describe_nfs_file_shares(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeNFSFileShares", input, options)
end
@doc """
Gets a description for one or more Server Message Block (SMB) file shares from an
S3 File Gateway.
This operation is only supported for S3 File Gateways.
"""
def describe_smb_file_shares(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSMBFileShares", input, options)
end
@doc """
Gets a description of the Server Message Block (SMB) file share settings from a
file gateway.
This operation is only supported for file gateways.
"""
def describe_smb_settings(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSMBSettings", input, options)
end
@doc """
Describes the snapshot schedule for the specified gateway volume.
The snapshot schedule information includes intervals at which snapshots are
automatically initiated on the volume. This operation is only supported in the
cached volume and stored volume types.
"""
def describe_snapshot_schedule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSnapshotSchedule", input, options)
end
@doc """
Returns the description of the gateway volumes specified in the request.
The list of gateway volumes in the request must be from one gateway. In the
response, Storage Gateway returns volume information sorted by volume ARNs. This
operation is only supported in stored volume gateway type.
"""
def describe_stored_iscsi_volumes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStorediSCSIVolumes", input, options)
end
@doc """
Returns a description of specified virtual tapes in the virtual tape shelf
(VTS).
This operation is only supported in the tape gateway type.
If a specific `TapeARN` is not specified, Storage Gateway returns a description
of all virtual tapes found in the VTS associated with your account.
"""
def describe_tape_archives(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTapeArchives", input, options)
end
@doc """
Returns a list of virtual tape recovery points that are available for the
specified tape gateway.
A recovery point is a point-in-time view of a virtual tape at which all the data
on the virtual tape is consistent. If your gateway crashes, virtual tapes that
have recovery points can be recovered to a new gateway. This operation is only
supported in the tape gateway type.
"""
def describe_tape_recovery_points(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTapeRecoveryPoints", input, options)
end
@doc """
Returns a description of the specified Amazon Resource Name (ARN) of virtual
tapes.
If a `TapeARN` is not specified, returns a description of all virtual tapes
associated with the specified gateway. This operation is only supported in the
tape gateway type.
"""
def describe_tapes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTapes", input, options)
end
@doc """
Returns information about the upload buffer of a gateway.
This operation is supported for the stored volume, cached volume, and tape
gateway types.
The response includes disk IDs that are configured as upload buffer space, and
it includes the amount of upload buffer space allocated and used.
"""
def describe_upload_buffer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeUploadBuffer", input, options)
end
@doc """
Returns a description of virtual tape library (VTL) devices for the specified
tape gateway.
In the response, Storage Gateway returns VTL device information.
This operation is only supported in the tape gateway type.
"""
def describe_vtl_devices(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeVTLDevices", input, options)
end
@doc """
Returns information about the working storage of a gateway.
This operation is only supported in the stored volume gateway type. This
operation is deprecated in cached volume API version (20120630). Use
`DescribeUploadBuffer` instead.
Working storage is also referred to as upload buffer. You can also use the
`DescribeUploadBuffer` operation to view the upload buffer for a stored volume gateway.
The response includes disk IDs that are configured as working storage, and it
includes the amount of working storage allocated and used.
"""
def describe_working_storage(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeWorkingStorage", input, options)
end
@doc """
Disconnects a volume from an iSCSI connection and then detaches the volume from
the specified gateway.
Detaching and attaching a volume enables you to recover your data from one
gateway to a different gateway without creating a snapshot. It also makes it
easier to move your volumes from an on-premises gateway to a gateway hosted on
an Amazon EC2 instance. This operation is only supported in the volume gateway
type.
"""
def detach_volume(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DetachVolume", input, options)
end
@doc """
Disables a tape gateway when the gateway is no longer functioning.
For example, if your gateway VM is damaged, you can disable the gateway so you
can recover virtual tapes.
Use this operation for a tape gateway that is not reachable or not functioning.
This operation is only supported in the tape gateway type.
After a gateway is disabled, it cannot be enabled.
"""
def disable_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableGateway", input, options)
end
@doc """
Disassociates an Amazon FSx file system from the specified gateway.
After the disassociation process finishes, the gateway can no longer access the
Amazon FSx file system. This operation is only supported in the FSx File Gateway
type.
"""
def disassociate_file_system(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateFileSystem", input, options)
end
@doc """
Adds a file gateway to an Active Directory domain.
This operation is only supported for file gateways that support the SMB file
protocol.
"""
def join_domain(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "JoinDomain", input, options)
end
@doc """
Lists the automatic tape creation policies for a gateway.
If there are no automatic tape creation policies for the gateway, it returns an
empty list.
This operation is only supported for tape gateways.
"""
def list_automatic_tape_creation_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAutomaticTapeCreationPolicies", input, options)
end
@doc """
Gets a list of the file shares for a specific S3 File Gateway, or the list of
file shares that belong to the calling user account.
This operation is only supported for S3 File Gateways.
"""
def list_file_shares(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFileShares", input, options)
end
@doc """
Gets a list of `FileSystemAssociationSummary` objects.
Each object contains a summary of a file system association. This operation is
only supported for FSx File Gateways.
"""
def list_file_system_associations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListFileSystemAssociations", input, options)
end
@doc """
Lists gateways owned by an account in a Region specified in the request.
The returned list is ordered by gateway Amazon Resource Name (ARN).
By default, the operation returns a maximum of 100 gateways. This operation
supports pagination that allows you to optionally reduce the number of gateways
returned in a response.
If you have more gateways than are returned in a response (that is, the response
returns only a truncated list of your gateways), the response contains a marker
that you can specify in your next request to fetch the next page of gateways.
"""
def list_gateways(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListGateways", input, options)
end
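  # Pagination sketch (assumes the documented `Limit`/`Marker` request fields
  # and a `Gateways`/`Marker` response shape):
  #
  #     defp all_gateways(client, marker \\ nil, acc \\ []) do
  #       input = if marker, do: %{"Limit" => 100, "Marker" => marker}, else: %{"Limit" => 100}
  #       {:ok, page, _http} = AWS.StorageGateway.list_gateways(client, input)
  #       acc = acc ++ Map.get(page, "Gateways", [])
  #       case page["Marker"] do
  #         nil -> acc
  #         next -> all_gateways(client, next, acc)
  #       end
  #     end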
@doc """
Returns a list of the gateway's local disks.
To specify which gateway to describe, you use the Amazon Resource Name (ARN) of
the gateway in the body of the request.
The request returns a list of all disks, specifying which are configured as
working storage, cache storage, or stored volume, or not configured at all. The
response includes a `DiskStatus` field. This field can have a value of present
(the disk is available to use), missing (the disk is no longer connected to the
gateway), or mismatch (the disk node is occupied by a disk that has incorrect
metadata or the disk content is corrupted).
"""
def list_local_disks(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListLocalDisks", input, options)
end
@doc """
Lists the tags that have been added to the specified resource.
This operation is supported in storage gateways of all types.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Lists custom tape pools.
You specify custom tape pools to list by specifying one or more custom tape pool
Amazon Resource Names (ARNs). If you don't specify a custom tape pool ARN, the
operation lists all custom tape pools.
This operation supports pagination. You can optionally specify the `Limit`
parameter in the body to limit the number of tape pools in the response. If the
number of tape pools returned in the response is truncated, the response
includes a `Marker` element that you can use in your subsequent request to
retrieve the next set of tape pools.
"""
def list_tape_pools(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTapePools", input, options)
end
@doc """
Lists virtual tapes in your virtual tape library (VTL) and your virtual tape
shelf (VTS).
You specify the tapes to list by specifying one or more tape Amazon Resource
Names (ARNs). If you don't specify a tape ARN, the operation lists all virtual
tapes in both your VTL and VTS.
This operation supports pagination. By default, the operation returns a maximum
of up to 100 tapes. You can optionally specify the `Limit` parameter in the body
to limit the number of tapes in the response. If the number of tapes returned in
the response is truncated, the response includes a `Marker` element that you can
use in your subsequent request to retrieve the next set of tapes. This operation
is only supported in the tape gateway type.
"""
def list_tapes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTapes", input, options)
end
@doc """
Lists iSCSI initiators that are connected to a volume.
You can use this operation to determine whether a volume is being used or not.
This operation is only supported in the cached volume and stored volume gateway
types.
"""
def list_volume_initiators(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListVolumeInitiators", input, options)
end
@doc """
Lists the recovery points for a specified gateway.
This operation is only supported in the cached volume gateway type.
Each cache volume has one recovery point. A volume recovery point is a point in
time at which all data of the volume is consistent and from which you can create
a snapshot or clone a new cached volume from a source volume. To create a
snapshot from a volume recovery point use the
`CreateSnapshotFromVolumeRecoveryPoint` operation.
"""
def list_volume_recovery_points(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListVolumeRecoveryPoints", input, options)
end
@doc """
Lists the iSCSI stored volumes of a gateway.
Results are sorted by volume ARN. The response includes only the volume ARNs. If
you want additional volume information, use the `DescribeStorediSCSIVolumes` or
the `DescribeCachediSCSIVolumes` API.
The operation supports pagination. By default, the operation returns a maximum
of up to 100 volumes. You can optionally specify the `Limit` field in the body
to limit the number of volumes in the response. If the number of volumes
returned in the response is truncated, the response includes a Marker field. You
can use this Marker value in your subsequent request to retrieve the next set of
volumes. This operation is only supported in the cached volume and stored volume
gateway types.
"""
def list_volumes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListVolumes", input, options)
end
@doc """
Sends you notification through CloudWatch Events when all files written to your
file share have been uploaded to Amazon S3.
Storage Gateway can send a notification through Amazon CloudWatch Events when
all files written to your file share up to that point in time have been uploaded
to Amazon S3. These files include files written to the file share up to the time
that you make a request for notification. When the upload is done, Storage
Gateway sends you notification through an Amazon CloudWatch Event. You can
configure CloudWatch Events to send the notification through event targets such
as Amazon SNS or a Lambda function. This operation is only supported for S3 File
Gateways.
For more information, see [Getting file upload notification](https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-upload-notification)
in the *Storage Gateway User Guide*.
"""
def notify_when_uploaded(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "NotifyWhenUploaded", input, options)
end
@doc """
Refreshes the cached inventory of objects for the specified file share.
This operation finds objects in the Amazon S3 bucket that were added, removed,
or replaced since the gateway last listed the bucket's contents and cached the
results. This operation does not import files into the S3 File Gateway cache
storage. It only updates the cached inventory to reflect changes in the
inventory of the objects in the S3 bucket. This operation is only supported in
the S3 File Gateway types.
You can subscribe to be notified through an Amazon CloudWatch event when your
`RefreshCache` operation completes. For more information, see [Getting notified about file
operations](https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-notification)
in the *Storage Gateway User Guide*. This operation is only supported for S3
File Gateways.
When this API is called, it only initiates the refresh operation. When the API
call completes and returns a success code, it doesn't necessarily mean that the
file refresh has completed. You should use the refresh-complete notification to
determine that the operation has completed before you check for new files on the
gateway file share. You can subscribe to be notified through a CloudWatch event
when your `RefreshCache` operation completes.
Throttle limit: This API is asynchronous, so the gateway will accept no more
than two refreshes at any time. We recommend using the refresh-complete
CloudWatch event notification before issuing additional requests. For more
information, see [Getting notified about file operations](https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-notification)
in the *Storage Gateway User Guide*.
If you invoke the `RefreshCache` API when two requests are already being
processed, any new request will cause an `InvalidGatewayRequestException` error
because too many requests were sent to the server.
For more information, see [Getting notified about file operations](https://docs.aws.amazon.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-notification)
in the *Storage Gateway User Guide*.
"""
def refresh_cache(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RefreshCache", input, options)
end
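  # Usage sketch (hypothetical file share ARN; `FolderList` and `Recursive`
  # are assumed optional fields of the RefreshCache API):
  #
  #     AWS.StorageGateway.refresh_cache(client, %{
  #       "FileShareARN" => "arn:aws:storagegateway:us-east-1:111122223333:share/share-ABCDEF12",
  #       "FolderList" => ["/incoming"],
  #       "Recursive" => true
  #     })
  #
  # The call only initiates the refresh; wait for the refresh-complete
  # CloudWatch event before checking the share for new files.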
@doc """
Removes one or more tags from the specified resource.
This operation is supported in storage gateways of all types.
"""
def remove_tags_from_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveTagsFromResource", input, options)
end
@doc """
Resets all cache disks that have encountered an error and makes the disks
available for reconfiguration as cache storage.
If your cache disk encounters an error, the gateway prevents read and write
operations on virtual tapes in the gateway. For example, an error can occur when
a disk is corrupted or removed from the gateway. When a cache is reset, the
gateway loses its cache storage. At this point, you can reconfigure the disks as
cache disks. This operation is only supported in the cached volume and tape
types.
If the cache disk you are resetting contains data that has not been uploaded to
Amazon S3 yet, that data can be lost. After you reset cache disks, there will be
no configured cache disks left in the gateway, so you must configure at least
one new cache disk for your gateway to function properly.
"""
def reset_cache(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ResetCache", input, options)
end
@doc """
Retrieves an archived virtual tape from the virtual tape shelf (VTS) to a tape
gateway.
Virtual tapes archived in the VTS are not associated with any gateway. However,
after a tape is retrieved, it is associated with a gateway, even though it is
also listed in the VTS, that is, archive. This operation is only supported in
the tape gateway type.
Once a tape is successfully retrieved to a gateway, it cannot be retrieved again
to another gateway. You must archive the tape again before you can retrieve it
to another gateway. This operation is only supported in the tape gateway type.
"""
def retrieve_tape_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RetrieveTapeArchive", input, options)
end
@doc """
Retrieves the recovery point for the specified virtual tape.
This operation is only supported in the tape gateway type.
A recovery point is a point in time view of a virtual tape at which all the data
on the tape is consistent. If your gateway crashes, virtual tapes that have
recovery points can be recovered to a new gateway.
The virtual tape can be retrieved to only one gateway. The retrieved tape is
read-only. The virtual tape can be retrieved to only a tape gateway. There is no
charge for retrieving recovery points.
"""
def retrieve_tape_recovery_point(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RetrieveTapeRecoveryPoint", input, options)
end
@doc """
Sets the password for your VM local console.
When you log in to the local console for the first time, you log in to the VM
with the default credentials. We recommend that you set a new password. You
don't need to know the default password to set a new password.
"""
def set_local_console_password(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetLocalConsolePassword", input, options)
end
@doc """
Sets the password for the guest user `smbguest`.
The `smbguest` user is the user when the authentication method for the file
share is set to `GuestAccess`. This operation is only supported for S3 File
Gateways.
"""
def set_smb_guest_password(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetSMBGuestPassword", input, options)
end
@doc """
Shuts down a gateway.
To specify which gateway to shut down, use the Amazon Resource Name (ARN) of the
gateway in the body of your request.
The operation shuts down the gateway service component running in the gateway's
virtual machine (VM) and not the host VM.
If you want to shut down the VM, it is recommended that you first shut down the
gateway component in the VM to avoid unpredictable conditions.
After the gateway is shut down, you cannot call any other API except
`StartGateway`, `DescribeGatewayInformation`, and `ListGateways`. For more
information, see `ActivateGateway`. Your applications cannot read from or write
to the gateway's storage volumes, and there are no snapshots taken.
When you make a shutdown request, you will get a `200 OK` success response
immediately. However, it might take some time for the gateway to shut down. You
can call the `DescribeGatewayInformation` API to check the status. For more
information, see `ActivateGateway`.
If you do not intend to use the gateway again, you must delete the gateway
(using `DeleteGateway`) so that you no longer pay software charges associated
with the gateway.
"""
def shutdown_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ShutdownGateway", input, options)
end
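  # Flow sketch: shut down, then poll `DescribeGatewayInformation` as the docs
  # suggest (the `GatewayState` response field is an assumption):
  #
  #     {:ok, _, _} = AWS.StorageGateway.shutdown_gateway(client, %{"GatewayARN" => gateway_arn})
  #     {:ok, %{"GatewayState" => state}, _} =
  #       AWS.StorageGateway.describe_gateway_information(client, %{"GatewayARN" => gateway_arn})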
@doc """
Starts a test that verifies that the specified gateway is configured for High
Availability monitoring in your host environment.
This request only initiates the test; a successful response only indicates
that the test was started. It doesn't indicate that the test passed.
For the status of the test, invoke the `DescribeAvailabilityMonitorTest` API.
Starting this test will cause your gateway to go offline for a brief period.
"""
def start_availability_monitor_test(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartAvailabilityMonitorTest", input, options)
end
@doc """
Starts a gateway that you previously shut down (see `ShutdownGateway`).
After the gateway starts, you can then make other API calls, your applications
can read from or write to the gateway's storage volumes, and you will be able
to take snapshot backups.
When you make a request, you will get a `200 OK` success response immediately.
However, it might take some time for the gateway to be ready. You should call
`DescribeGatewayInformation` and check the status before making any additional
API calls. For more information, see `ActivateGateway`.
To specify which gateway to start, use the Amazon Resource Name (ARN) of the
gateway in your request.
"""
def start_gateway(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartGateway", input, options)
end
@doc """
Updates the automatic tape creation policy of a gateway.
Use this to update the policy with a new set of automatic tape creation rules.
This is only supported for tape gateways.
By default, there is no automatic tape creation policy.
A gateway can have only one automatic tape creation policy.
"""
def update_automatic_tape_creation_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateAutomaticTapeCreationPolicy", input, options)
end
@doc """
Updates the bandwidth rate limits of a gateway.
You can update both the upload and download bandwidth rate limit or specify only
one of the two. If you don't set a bandwidth rate limit, the existing rate limit
remains. This operation is supported for the stored volume, cached volume, and
tape gateway types.
By default, a gateway's bandwidth rate limits are not set. If you don't set any
limit, the gateway does not have any limitations on its bandwidth usage and
could potentially use the maximum available bandwidth.
To specify which gateway to update, use the Amazon Resource Name (ARN) of the
gateway in your request.
"""
def update_bandwidth_rate_limit(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateBandwidthRateLimit", input, options)
end
@doc """
Updates the bandwidth rate limit schedule for a specified gateway.
By default, gateways do not have bandwidth rate limit schedules, which means no
bandwidth rate limiting is in effect. Use this to initiate or update a gateway's
bandwidth rate limit schedule. This operation is supported in the volume and
tape gateway types.
"""
def update_bandwidth_rate_limit_schedule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateBandwidthRateLimitSchedule", input, options)
end
@doc """
Updates the Challenge-Handshake Authentication Protocol (CHAP) credentials for a
specified iSCSI target.
By default, a gateway does not have CHAP enabled; however, for added security,
you might use it. This operation is supported in the volume and tape gateway
types.
When you update CHAP credentials, all existing connections on the target are
closed and initiators must reconnect with the new credentials.
"""
def update_chap_credentials(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateChapCredentials", input, options)
end
@doc """
Updates a file system association.
This operation is only supported for FSx File Gateways.
"""
def update_file_system_association(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateFileSystemAssociation", input, options)
end
@doc """
Updates a gateway's metadata, which includes the gateway's name and time zone.
To specify which gateway to update, use the Amazon Resource Name (ARN) of the
gateway in your request.
For gateways activated after September 2, 2015, the gateway's ARN contains the
gateway ID rather than the gateway name. However, changing the name of the
gateway has no effect on the gateway's ARN.
"""
def update_gateway_information(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateGatewayInformation", input, options)
end
@doc """
Updates the gateway virtual machine (VM) software.
The request immediately triggers the software update.
When you make this request, you get a `200 OK` success response immediately.
However, it might take some time for the update to complete. You can call
`DescribeGatewayInformation` to verify the gateway is in the `STATE_RUNNING`
state.
A software update forces a system restart of your gateway. You can minimize the
chance of any disruption to your applications by increasing your iSCSI
Initiators' timeouts. For more information about increasing iSCSI Initiator
timeouts for Windows and Linux, see [Customizing your Windows iSCSI settings](https://docs.aws.amazon.com/storagegateway/latest/userguide/ConfiguringiSCSIClientInitiatorWindowsClient.html#CustomizeWindowsiSCSISettings)
and [Customizing your Linux iSCSI settings](https://docs.aws.amazon.com/storagegateway/latest/userguide/ConfiguringiSCSIClientInitiatorRedHatClient.html#CustomizeLinuxiSCSISettings),
respectively.
"""
def update_gateway_software_now(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateGatewaySoftwareNow", input, options)
end
@doc """
Updates a gateway's weekly maintenance start time information, including day and
time of the week.
The maintenance time is the time in your gateway's time zone.
"""
def update_maintenance_start_time(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateMaintenanceStartTime", input, options)
end
@doc """
Updates a Network File System (NFS) file share.
This operation is only supported in S3 File Gateways.
To leave a file share field unchanged, set the corresponding input field to
null.
Updates the following file share settings:
* Default storage class for your S3 bucket
* Metadata defaults for your S3 bucket
* Allowed NFS clients for your file share
* Squash settings
* Write status of your file share
"""
def update_nfs_file_share(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateNFSFileShare", input, options)
end
@doc """
Updates a Server Message Block (SMB) file share.
This operation is only supported for S3 File Gateways.
To leave a file share field unchanged, set the corresponding input field to
null.
File gateways require Security Token Service (STS) to be activated to enable you
to create a file share. Make sure that STS is activated in the Region you are
creating your file gateway in. If STS is not activated in this Region, activate
it. For information about how to activate STS, see [Activating and deactivating STS in an AWS
Region](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html)
in the *Identity and Access Management User Guide*.
File gateways don't support creating hard or symbolic links on a file share.
"""
def update_smb_file_share(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSMBFileShare", input, options)
end
@doc """
Controls whether the shares on an S3 File Gateway are visible in a net view or
browse list.
The operation is only supported for S3 File Gateways.
"""
def update_smb_file_share_visibility(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSMBFileShareVisibility", input, options)
end
@doc """
Updates the SMB security strategy on a file gateway.
This action is only supported in file gateways.
This API is called Security level in the User Guide.
A higher security level can affect the performance of the gateway.
"""
def update_smb_security_strategy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSMBSecurityStrategy", input, options)
end
@doc """
Updates a snapshot schedule configured for a gateway volume.
This operation is only supported in the cached volume and stored volume gateway
types.
The default snapshot schedule for a volume is once every 24 hours, starting at the
creation time of the volume. You can use this API to change the snapshot
schedule configured for the volume.
In the request you must identify the gateway volume whose snapshot schedule you
want to update and provide the schedule information, including when you want the
snapshot to begin on a day and the frequency (in hours) of snapshots.
"""
def update_snapshot_schedule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateSnapshotSchedule", input, options)
end
@doc """
Updates the type of medium changer in a tape gateway.
When you activate a tape gateway, you select a medium changer type for the tape
gateway. This operation enables you to select a different type of medium changer
after a tape gateway is activated. This operation is only supported in the tape
gateway type.
"""
def update_vtl_device_type(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateVTLDeviceType", input, options)
end
end
|
lib/aws/generated/storage_gateway.ex
| 0.908734
| 0.596903
|
storage_gateway.ex
|
starcoder
|
defmodule Day7 do
  def run(lines) do
    rules = parse_rules(lines)
    p1 = part1(rules)
    p2 = bags_inside("shiny gold", rules)
    "part1: #{p1} part2: #{p2}"
  end
def part1(rules) do
rules
|> Enum.count(fn {color, _} -> can_contain(color, "shiny gold", rules) end)
end
@doc """
Returns the number of bags inside a bag of a given
color, not counting the outer bag itself.
"""
def bags_inside(color, rules) do
# For one item in a rule, how many bags?
    # This includes the bag itself and everything
# inside it.
bags_for_rule_item = fn {count, inner_color} ->
count * (1 + bags_inside(inner_color, rules))
end
# Sum the bags for each item in the rule.
rules[color]
|> Enum.map(bags_for_rule_item)
|> Enum.sum()
end
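  # With the first Advent of Code 2020 day 7 sample ruleset, for example,
  # `bags_inside("shiny gold", rules)` returns 32.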
def can_contain(outer_color, inner_color, rules) do
# Can one rule item contain the color, either
# by specifying that color directly, or by
# specifying a color that contains it?
rule_item_can_contain_it = fn {_, content_color} ->
content_color == inner_color or
can_contain(content_color, inner_color, rules)
end
rules[outer_color]
|> Enum.any?(rule_item_can_contain_it)
end
@spec parse_rules([String.t()]) :: %{String.t() => [{integer, String.t()}]}
def parse_rules(lines) do
for line <- lines,
into: %{} do
parse_rule(line)
end
end
@doc """
Parses one contents line
## Examples
iex> Day7.parse_rule("light red bags contain 1 bright white bag, 2 muted yellow bags.")
{"light red", [{1, "bright white"}, {2, "muted yellow"}]}
iex> Day7.parse_rule("faded blue bags contain no other bags.")
{"faded blue", []}
"""
@spec parse_rule(String.t()) :: {String.t(), [{integer, String.t()}]}
  def parse_rule(line) do
    [_, lhs, rhs] = Regex.run(~r{^(.*) bags contain (.*)}, line)
    {lhs, parse_rhs(rhs)}
  end
end
@spec parse_rhs(String.t()) :: [{integer, String.t()}]
def parse_rhs(rhs) do
if String.contains?(rhs, "no other bag") do
[]
else
      for item <- String.split(rhs, ", ") do
        [_, count_str, color] = Regex.run(~r{([0-9]+) (.*) bag}, item)
        {String.to_integer(count_str), color}
      end
end
end
end
|
elixir_advent/lib/day7.ex
| 0.737536
| 0.427516
|
day7.ex
|
starcoder
|
defmodule VintageNet.PowerManager do
@moduledoc """
This is a behaviour for implementing platform-specific power management.
From VintageNet's point of view, network devices have the following
lifecycle:
```
off ---> on ---> powering-off ---> off
```
Power management does not necessarily mean controlling the power. The end
effect should be similar, since VintageNet will try to toggle the power off
and on if the network interface doesn't seem to be working. For example,
unloading the kernel module for the network device on "power off" and loading
it on "power on" may have the desired effect of getting a network interface
unstuck.
When a device is "on", VintageNet expects to be regularly told that the
device is working ok. Working ok is device dependent, but could be something
like the device has transmitted and received data. If VintageNet is not told
that the device is working for a long enough time, it will reset the device
by powering it off and then back on again.
VintageNet calls functions here based on how it wants to transition a device.
VintageNet maintains the device's power status internally, so implementations
can blindly do what VintageNet tells them to in most cases. Powering on and
off can be asynchronous to these function calls. VintageNet uses the presence
of the networking interface (like "wlan0") to determine when the device is
really available for networking.
The following timeouts are important to consider (in milliseconds):
1. `time_to_power_off`
2. `power_on_hold_time`
3. `min_power_off_time`
4. `watchdog_timeout`
The `time_to_power_off` specifies the time in the `powering-off` state. This
is the maximum time to allow for a graceful shutdown. VintageNet won't bother
the device until that time has expired. That means that if there's a request
to use the device, it will wait the `powering-off` time before calling
`finish_power_off` and then it will power the device back on. Device app
notes may have recommendations for this time.
The `power_on_hold_time` specifies how much time a device should be in the
`powered-on` state before it is ok to power off again. This allows devices
some time to initialize and recover on their own.
The `min_power_off_time` specifies how long the device should remain powered
off before it is powered back on.
Finally, `watchdog_timeout` specifies how long to wait between notifications
that the device is ok. Code reports that a device is ok by calling
`VintageNet.PowerManager.PMControl.pet_watchdog/1`.
While normal Erlang supervision expects that it can restart processes
immediately and without regard to how long they have been running, bad things
can happen to hardware if too aggressively restarted. Devices also initialize
asynchronously so it's hard to know when they're fully available and some
flakiness may be naturally due to VintageNet not knowing how to wait for a
component to finish initialization. Please review your network device's power
management guidelines before too aggressively reducing hold times. Cellular
devices, in particular, want to signal their disconnection from the network
to the tower and flush any unsaved configuration changes to Flash before
power removal.
Here's an example for a cellular device with a reset line connected to it:
* `power_on` - De-assert the reset line. Return a `power_on_hold_time` of 10
minutes
* `start_powering_off` - Open the UART and send the power down command to the
modem. Return a `time_to_power_off` of 1 minute.
* `power_off` - Assert the reset line and return that power shouldn't be turned
back on for another 10 seconds.
PowerManager implementation lifetimes are the same as VintageNet's. In other
words, they start and end with VintageNet. This is unlike a network interface
which runs only as its existence and configuration allow. As such, VintageNet
needs to know about all PowerManager implementations in its application
environment. For example, add something like this to your `config.exs`:
```elixir
config :vintage_net,
power_managers: [{MyCellularPM, [ifname: "ppp0", watchdog_timeout: 60_000, reset_gpio: 123]}]
```
Each tuple is the implementation's module name and init arguments. VintageNet
requires `:ifname` to be set. If you're managing the power for an interface
with a dynamic name, enable predictable interface naming with `VintageNet`
and use that name. The `watchdog_timeout` parameter is optional and defaults
to one minute.
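
  For illustration, a minimal skeleton that satisfies this behaviour might look
  like the following (the module name and timings are hypothetical, and real
  GPIO/UART handling is omitted):

  ```elixir
  defmodule MyCellularPM do
    @behaviour VintageNet.PowerManager

    @impl true
    def init(args), do: {:ok, %{ifname: Keyword.fetch!(args, :ifname)}}

    @impl true
    def power_on(state) do
      # De-assert the reset line here, then hold power on for 10 minutes
      {:ok, state, 600_000}
    end

    @impl true
    def start_powering_off(state) do
      # Ask the modem to shut down gracefully; allow up to 1 minute
      {:ok, state, 60_000}
    end

    @impl true
    def power_off(state) do
      # Assert the reset line; keep power off for at least 10 seconds
      {:ok, state, 10_000}
    end

    @impl true
    def handle_info(_msg, state), do: {:noreply, state}
  end
  ```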
"""
@doc """
Initialize state for managing the power to the specified interface
This is called on start and if the power management GenServer restarts. It
should not assume that hardware is powered down.
  IMPORTANT: VintageNet assumes that `init/1` runs quickly and succeeds. Errors
  and exceptions from calling `init/1` are handled by disabling the PowerManager.
  The reason is that VintageNet has no knowledge of how to recover, and disabling
  a power manager was deemed less bad than having supervision tree failures
  propagate upwards to terminate VintageNet. Messages are logged if this does
  happen.
"""
@callback init(args :: keyword()) :: {:ok, state :: any()}
@doc """
Power on the hardware for a network interface
The function should turn on power rails, deassert reset lines, load kernel
modules or do whatever else is necessary to make the interface show up in
Linux.
Failure handling is not supported by VintageNet yet, so if power up can fail
and the right handling for that is to try again later, then this function
should do that.
It is ok for this function to return immediately. When the network interface
appears, VintageNet will start trying to use it.
The return tuple should include the number of milliseconds VintageNet should
wait before trying to power down the module again. This value should be
sufficiently large to avoid getting into loops where VintageNet gives up on a
network interface before it has initialized. 10 minutes (600,000 milliseconds),
for example, is a reasonable setting.
"""
@callback power_on(state :: any()) ::
{:ok, next_state :: any(), hold_time :: non_neg_integer()}
@doc """
Start powering off the hardware for a network interface
This function should start a graceful shutdown of the network interface
hardware. It may return immediately. The return value specifies how long in
milliseconds VintageNet should wait before calling `power_off/2`. The idea is
that a graceful power off should be allowed some time to complete, but not
forever.
"""
@callback start_powering_off(state :: any()) ::
{:ok, next_state :: any(), time_to_power_off :: non_neg_integer()}
@doc """
Power off the hardware
This function should finish powering off the network interface hardware. Since
this is called after the graceful power down should have completed, it should
forcefully turn off the power to the hardware.
The implementation also returns a time that power must remain off, in milliseconds.
`power_on/1` won't be called until that time expires.
"""
@callback power_off(state :: any()) ::
{:ok, next_state :: any(), min_off_time :: non_neg_integer()}
@doc """
Handle other messages
All unknown messages sent to the power management `GenServer` come here. This
callback is similar to `c:GenServer.handle_info/2`.
To receive your own messages here, send them to `self()` in code run in any
of the other callbacks. Another option is to call
`VintageNet.PowerManager.PMControl.send_message/2`
"""
@callback handle_info(msg :: any(), state :: any()) :: {:noreply, new_state :: any()}
end
|
lib/vintage_net/power_manager.ex
| 0.901402
| 0.889912
|
power_manager.ex
|
starcoder
|
defmodule Mongo.Find do
@moduledoc """
Find operation on MongoDB
"""
use Mongo.Helpers
defstruct [
mongo: nil,
collection: nil,
selector: %{},
projector: %{},
batchSize: 0,
skip: 0,
opts: %{},
mods: %{}]
@doc """
Creates a new find operation.
Not to be used directly, prefer `Mongo.Collection.find/3`
"""
def new(collection, jsString, projector) when is_binary(jsString), do: new(collection, %{'$where': jsString}, projector)
def new(collection, selector, projector) do
%__MODULE__{collection: collection, selector: selector, projector: projector, opts: collection |> Mongo.Collection.read_opts}
end
@doc """
Sets where MongoDB begins returning results
Must be run before executing the query
      iex> Mongo.connect.db("test").collection("anycoll").find.skip(1).toArray |> Enum.count
      5
      iex> Mongo.connect.db("test").collection("anycoll").find.skip(2).toArray |> Enum.count
      4
"""
def skip(find, skip), do: %__MODULE__{find| skip: skip}
@doc """
  Limits the number of documents returned by the query.
Must be run before executing the query
"""
def limit(find, limit) when is_integer(limit), do: %__MODULE__{find| batchSize: -limit}
@doc """
Executes the query and returns a `%Mongo.Cursor{}`
"""
def exec(find) do
Mongo.Cursor.exec(find.collection, Mongo.Request.query(find), find.batchSize)
end
@doc """
Runs the explain operator that provides information on the query plan
"""
def explain(find) do
find |> addSpecial(:'$explain', 1) |> Enum.at(0)
end
@doc """
  Adds the hint operator, which forces the query optimizer to use a specific index to fulfill the query
"""
def hint(f, hints)
def hint(f, indexName) when is_atom(indexName), do: f |> addSpecial(:'$hint', indexName)
def hint(f, hints) when is_map(hints), do: f |> addSpecial(:'$hint', hints)
def sort(f, opts) when is_map(opts) or is_list(opts), do: f |> addSpecial(:"$orderby", opts)
@doc """
Sets query options
  The default option set is equivalent to calling:
Find.opts(
awaitdata: false
nocursortimeout: false
slaveok: true
tailablecursor: false)
"""
def opts(find, options), do: %__MODULE__{find| opts: options}
def addSpecial(find, k, v) do
%__MODULE__{find| mods: Map.put(find.mods, k, v)}
end
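  # Illustrative chained query (a sketch; it assumes `collection` is a
  # `Mongo.Collection` and that matching documents exist). Enumeration works
  # because of the `Enumerable` implementation below:
  #
  #     collection
  #     |> Mongo.Find.new(%{status: "active"}, %{})
  #     |> Mongo.Find.skip(10)
  #     |> Mongo.Find.limit(5)
  #     |> Mongo.Find.sort(%{created_at: -1})
  #     |> Enum.to_list()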
defimpl Enumerable, for: Mongo.Find do
@doc """
Executes the query and reduce retrieved documents into a value
"""
def reduce(find, acc, reducer) do
case Mongo.Find.exec(find) do
%Mongo.Cursor{}=cursor ->
case Enumerable.reduce(cursor, {:cont, acc},
fn(docs, acc)->
case Enumerable.reduce(docs, acc, reducer) do
{:done, acc} -> {:cont, {:cont, acc}}
{:halted, acc} -> {:halt, acc}
{:suspended, acc} -> {:suspend, acc}
error -> {:halt, error}
end
end) do
{:done, {:cont, acc}} -> {:done, acc}
other -> other
end
error ->
case error do
{:error, msg} -> raise Mongo.Bang, msg: msg, acc: acc
%Mongo.Error{msg: msg, acc: acc} -> raise Mongo.Bang, msg: msg, acc: acc
end
end
end
@doc """
    Counts the number of documents to be retrieved
"""
def count(find) do
case Mongo.Collection.count(find.collection, find.selector, Map.take(find, [:skip, :limit])) do
%Mongo.Error{} -> -1
n -> n
end
end
@doc """
Not implemented
"""
def member?(_, _), do: :not_implemented
@doc false
#Not implemented
def slice(_), do: {:error, __MODULE__}
end
end
|
lib/mongo_find.ex
| 0.803714
| 0.414632
|
mongo_find.ex
|
starcoder
|
defimpl Timex.Protocol, for: DateTime do
@moduledoc """
A type which represents a date and time with timezone information (optional, UTC will
be assumed for date/times with no timezone information provided).
  Functions that produce time intervals use the UNIX epoch (or simply Epoch) as the
default reference date. Epoch is defined as UTC midnight of January 1, 1970.
Time intervals in this module don't account for leap seconds.
"""
import Timex.Macros
use Timex.Constants
alias Timex.{Duration, AmbiguousDateTime}
alias Timex.{Timezone, TimezoneInfo}
def to_julian(%DateTime{:year => y, :month => m, :day => d}) do
Timex.Calendar.Julian.julian_date(y, m, d)
end
def to_gregorian_seconds(date) do
with {s, _} <- Timex.DateTime.to_gregorian_seconds(date), do: s
end
def to_gregorian_microseconds(%DateTime{} = date) do
with {s, us} <- Timex.DateTime.to_gregorian_seconds(date), do: s * (1_000 * 1_000) + us
end
def to_unix(date), do: DateTime.to_unix(date)
def to_date(date), do: DateTime.to_date(date)
def to_datetime(%DateTime{time_zone: timezone} = d, timezone),
do: d
def to_datetime(%DateTime{time_zone: tz} = d, %TimezoneInfo{full_name: tz}),
do: d
def to_datetime(%DateTime{} = d, timezone) do
Timezone.convert(d, timezone)
end
def to_naive_datetime(%DateTime{} = d) do
# NOTE: For legacy reasons we shift DateTimes to UTC when making them naive,
# but the standard library just drops the timezone info
d
|> Timex.DateTime.shift_zone!("Etc/UTC", Timex.Timezone.Database)
|> DateTime.to_naive()
end
def to_erl(%DateTime{} = d) do
{{d.year, d.month, d.day}, {d.hour, d.minute, d.second}}
end
def century(%DateTime{:year => year}), do: Timex.century(year)
def is_leap?(%DateTime{year: year}), do: :calendar.is_leap_year(year)
def beginning_of_day(%DateTime{time_zone: time_zone, microsecond: {_, precision}} = datetime) do
us = Timex.DateTime.Helpers.construct_microseconds(0, precision)
time = Timex.Time.new!(0, 0, 0, us)
with {:ok, datetime} <-
Timex.DateTime.new(
DateTime.to_date(datetime),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, _a, b} ->
# Beginning of the day is after the gap
b
{:ambiguous, _a, b} ->
# Choose the latter of the ambiguous times
b
end
end
def end_of_day(%DateTime{time_zone: time_zone, microsecond: {_, precision}} = datetime) do
us = Timex.DateTime.Helpers.construct_microseconds(999_999, precision)
time = Timex.Time.new!(23, 59, 59, us)
with {:ok, datetime} <-
Timex.DateTime.new(
DateTime.to_date(datetime),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, a, _b} ->
# End of day is before the gap
a
{:ambiguous, a, _b} ->
# Choose the former of the ambiguous times
a
end
end
def beginning_of_week(
%DateTime{time_zone: time_zone, microsecond: {_, precision}} = date,
weekstart
) do
us = Timex.DateTime.Helpers.construct_microseconds(0, precision)
time = Timex.Time.new!(0, 0, 0, us)
with weekstart when is_atom(weekstart) <- Timex.standardize_week_start(weekstart),
date = Timex.Date.beginning_of_week(DateTime.to_date(date), weekstart),
{:ok, datetime} <- Timex.DateTime.new(date, time, time_zone, Timex.Timezone.Database) do
datetime
else
{:gap, _a, b} ->
# Beginning of week is after the gap
b
{:ambiguous, _a, b} ->
b
{:error, _} = err ->
err
end
end
def end_of_week(%DateTime{time_zone: time_zone, microsecond: {_, precision}} = date, weekstart) do
with weekstart when is_atom(weekstart) <- Timex.standardize_week_start(weekstart),
date = Timex.Date.end_of_week(DateTime.to_date(date), weekstart),
us = Timex.DateTime.Helpers.construct_microseconds(999_999, precision),
time = Timex.Time.new!(23, 59, 59, us),
{:ok, datetime} <- Timex.DateTime.new(date, time, time_zone, Timex.Timezone.Database) do
datetime
else
{:gap, a, _b} ->
# End of week is before the gap
a
{:ambiguous, a, _b} ->
a
{:error, _} = err ->
err
end
end
def beginning_of_year(%DateTime{year: year, time_zone: time_zone, microsecond: {_, precision}}) do
us = Timex.DateTime.Helpers.construct_microseconds(0, precision)
time = Timex.Time.new!(0, 0, 0, us)
with {:ok, datetime} <-
Timex.DateTime.new(
Timex.Date.new!(year, 1, 1),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, _a, b} ->
# Beginning of year is after the gap
b
{:ambiguous, _a, b} ->
b
end
end
def end_of_year(%DateTime{year: year, time_zone: time_zone, microsecond: {_, precision}}) do
us = Timex.DateTime.Helpers.construct_microseconds(999_999, precision)
time = Timex.Time.new!(23, 59, 59, us)
with {:ok, datetime} <-
Timex.DateTime.new(
Timex.Date.new!(year, 12, 31),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, a, _b} ->
# End of year is before the gap
a
{:ambiguous, a, _b} ->
a
end
end
def beginning_of_quarter(%DateTime{
year: year,
month: month,
time_zone: time_zone,
microsecond: {_, precision}
}) do
month = 1 + 3 * (Timex.quarter(month) - 1)
us = Timex.DateTime.Helpers.construct_microseconds(0, precision)
time = Timex.Time.new!(0, 0, 0, us)
with {:ok, datetime} <-
Timex.DateTime.new(
Timex.Date.new!(year, month, 1),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, _a, b} ->
# Beginning of quarter is after the gap
b
{:ambiguous, _a, b} ->
b
end
end
def end_of_quarter(%DateTime{
year: year,
month: month,
time_zone: time_zone,
microsecond: {_, precision}
}) do
month = 3 * Timex.quarter(month)
date = Timex.Date.end_of_month(Timex.Date.new!(year, month, 1))
us = Timex.DateTime.Helpers.construct_microseconds(999_999, precision)
time = Timex.Time.new!(23, 59, 59, us)
with {:ok, datetime} <- Timex.DateTime.new(date, time, time_zone, Timex.Timezone.Database) do
datetime
else
{:gap, a, _b} ->
# End of quarter is before the gap
a
{:ambiguous, a, _b} ->
a
end
end
def beginning_of_month(%DateTime{
year: year,
month: month,
time_zone: time_zone,
microsecond: {_, precision}
}) do
us = Timex.DateTime.Helpers.construct_microseconds(0, precision)
time = Timex.Time.new!(0, 0, 0, us)
with {:ok, datetime} <-
Timex.DateTime.new(
Timex.Date.new!(year, month, 1),
time,
time_zone,
Timex.Timezone.Database
) do
datetime
else
{:gap, _a, b} ->
# Beginning of month is after the gap
b
{:ambiguous, _a, b} ->
b
end
end
def end_of_month(%DateTime{
year: year,
month: month,
time_zone: time_zone,
microsecond: {_, precision}
}) do
date = Timex.Date.end_of_month(Timex.Date.new!(year, month, 1))
us = Timex.DateTime.Helpers.construct_microseconds(999_999, precision)
time = Timex.Time.new!(23, 59, 59, us)
with {:ok, datetime} <- Timex.DateTime.new(date, time, time_zone, Timex.Timezone.Database) do
datetime
else
{:gap, a, _b} ->
# End of month is before the gap
a
{:ambiguous, a, _b} ->
a
end
end
def quarter(%DateTime{year: y, month: m, day: d}),
do: Calendar.ISO.quarter_of_year(y, m, d)
def days_in_month(d), do: Date.days_in_month(d)
def week_of_month(%DateTime{:year => y, :month => m, :day => d}),
do: Timex.week_of_month(y, m, d)
def weekday(datetime), do: Timex.Date.day_of_week(datetime)
def weekday(datetime, weekstart), do: Timex.Date.day_of_week(datetime, weekstart)
def day(datetime), do: Date.day_of_year(datetime)
def is_valid?(%DateTime{
:year => y,
:month => m,
:day => d,
:hour => h,
:minute => min,
:second => sec
}) do
:calendar.valid_date({y, m, d}) and Timex.is_valid_time?({h, min, sec})
end
def iso_week(%DateTime{:year => y, :month => m, :day => d}),
do: Timex.iso_week(y, m, d)
def from_iso_day(%DateTime{year: year} = date, day) when is_day_of_year(day) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(year, day)
%DateTime{date | :year => year, :month => month, :day => day_of_month}
end
def set(%DateTime{} = date, options) do
validate? = Keyword.get(options, :validate, true)
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, result ->
case option do
{:validate, _} ->
result
{:datetime, {{y, m, d}, {h, min, sec}}} ->
if validate? do
%{
result
| :year => Timex.normalize(:year, y),
:month => Timex.normalize(:month, m),
:day => Timex.normalize(:day, {y, m, d}),
:hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, min),
:second => Timex.normalize(:second, sec)
}
else
%{
result
| :year => y,
:month => m,
:day => d,
:hour => h,
:minute => min,
:second => sec
}
end
{:date, {y, m, d}} ->
if validate? do
{yn, mn, dn} = Timex.normalize(:date, {y, m, d})
%{result | :year => yn, :month => mn, :day => dn}
else
%{result | :year => y, :month => m, :day => d}
end
{:date, %Date{} = d} ->
Timex.set(result, date: {d.year, d.month, d.day})
{:time, {h, m, s}} ->
if validate? do
%{
result
| :hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, m),
:second => Timex.normalize(:second, s)
}
else
%{result | :hour => h, :minute => m, :second => s}
end
{:time, {h, m, s, ms}} ->
if validate? do
%{
result
| :hour => Timex.normalize(:hour, h),
:minute => Timex.normalize(:minute, m),
:second => Timex.normalize(:second, s),
:microsecond => Timex.normalize(:microsecond, ms)
}
else
%{result | :hour => h, :minute => m, :second => s, :microsecond => ms}
end
{:time, %Time{} = t} ->
Timex.set(result, time: {t.hour, t.minute, t.second, t.microsecond})
{:day, d} ->
if validate? do
%{result | :day => Timex.normalize(:day, {result.year, result.month, d})}
else
%{result | :day => d}
end
{:timezone, tz} ->
tz =
case tz do
%TimezoneInfo{} -> tz
_ -> Timezone.get(tz, result)
end
%{
result
| :time_zone => tz.full_name,
:zone_abbr => tz.abbreviation,
:utc_offset => tz.offset_utc,
:std_offset => tz.offset_std
}
{name, val} when name in [:year, :month, :hour, :minute, :second, :microsecond] ->
if validate? do
Map.put(result, name, Timex.normalize(name, val))
else
Map.put(result, name, val)
end
{option_name, _} ->
{:error, {:bad_option, option_name}}
end
end)
end
@doc """
Shifts the given DateTime based on a series of options.
See docs for Timex.shift/2 for details.
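
  For example (an illustrative sketch, not an exhaustive doctest), shifting past
  the end of a month clamps to the last day of the target month:

      Timex.shift(~U[2020-01-31 00:00:00Z], months: 1)
      #=> ~U[2020-02-29 00:00:00Z]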
"""
@spec shift(DateTime.t(), list({atom(), term})) :: DateTime.t() | {:error, term}
def shift(%DateTime{} = datetime, shifts) when is_list(shifts) do
{logical_shifts, shifts} = Keyword.split(shifts, [:years, :months, :weeks, :days])
shift = calculate_shift(shifts)
shifted =
case logical_shift(datetime, logical_shifts) do
{:error, _} = err ->
err
%DateTime{} = datetime when shift != 0 ->
DateTime.add(datetime, shift, :microsecond, Timex.Timezone.Database)
%DateTime{} = datetime ->
datetime
{{ty, _, _}, %DateTime{} = orig} when ty in [:gap, :ambiguous] and shift != 0 ->
DateTime.add(orig, shift, :microsecond, Timex.Timezone.Database)
{{ty, _a, _b} = amb, _} when ty in [:gap, :ambiguous] ->
amb
end
case shifted do
{ty, a, b} when ty in [:gap, :ambiguous] ->
%AmbiguousDateTime{before: a, after: b, type: ty}
result ->
result
end
rescue
err in [FunctionClauseError] ->
case {err.module, err.function} do
{Calendar.ISO, _} ->
{:error, :invalid_date}
_ ->
reraise err, __STACKTRACE__
end
catch
:throw, {:error, _} = err ->
err
end
defp logical_shift(datetime, []), do: datetime
defp logical_shift(datetime, shifts) do
sorted = Enum.sort_by(shifts, &elem(&1, 0), &compare_unit/2)
case do_logical_shift(datetime, sorted) do
%DateTime{time_zone: time_zone} = dt ->
with {:ok, shifted} <-
DateTime.from_naive(DateTime.to_naive(dt), time_zone, Timex.Timezone.Database) do
shifted
else
{ty, _, _} = amb when ty in [:gap, :ambiguous] ->
{amb, dt}
{:error, _} = err ->
err
end
err ->
err
end
end
defp do_logical_shift(datetime, []), do: datetime
defp do_logical_shift(datetime, [{unit, value} | rest]) do
do_logical_shift(shift_by(datetime, value, unit), rest)
end
# Consider compare_unit/2 an analog of Kernel.<=/2
# We want the largest units first
defp compare_unit(:years, _), do: true
defp compare_unit(_, :years), do: false
defp compare_unit(:months, _), do: true
defp compare_unit(_, :months), do: false
defp compare_unit(:weeks, _), do: true
defp compare_unit(_, :weeks), do: false
defp compare_unit(:days, _), do: true
defp compare_unit(_, :days), do: false
defp calculate_shift(shifts), do: calculate_shift(shifts, 0)
defp calculate_shift([], acc), do: acc
defp calculate_shift([{:duration, %Duration{} = duration} | rest], acc) do
total_microseconds = Duration.to_microseconds(duration)
calculate_shift(rest, acc + total_microseconds)
end
defp calculate_shift([{:hours, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value * 60 * 60 * 1_000 * 1_000)
end
defp calculate_shift([{:minutes, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value * 60 * 1_000 * 1_000)
end
defp calculate_shift([{:seconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value * 1_000 * 1_000)
end
defp calculate_shift([{:milliseconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value * 1_000)
end
defp calculate_shift([{:microseconds, value} | rest], acc) when is_integer(value) do
calculate_shift(rest, acc + value)
end
defp calculate_shift([{k, _} | _], _acc),
do: throw({:error, {:unknown_shift_unit, k}})
defp shift_by(%DateTime{year: y, month: m, day: d} = datetime, value, :years) do
new_year = y + value
shifted = %DateTime{datetime | year: new_year}
cond do
new_year < 0 ->
{:error, :shift_to_invalid_date}
m == 2 and d == 29 and :calendar.is_leap_year(y) and :calendar.is_leap_year(new_year) ->
shifted
m == 2 and d == 29 and :calendar.is_leap_year(y) ->
# Shift to March 1st in non-leap years
%DateTime{shifted | month: 3, day: 1}
:else ->
shifted
end
end
defp shift_by(%DateTime{} = datetime, 0, :months),
do: datetime
# Positive shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :months)
when value > 0 do
add_years = div(value, 12)
add_months = rem(value, 12)
{year, month} =
if month + add_months <= 12 do
{year + add_years, month + add_months}
else
total_months = month + add_months
{year + add_years + 1, total_months - 12}
end
ldom = :calendar.last_day_of_the_month(year, month)
cond do
day > ldom ->
%DateTime{datetime | year: year, month: month, day: ldom}
:else ->
%DateTime{datetime | year: year, month: month}
end
end
# Negative shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :months) do
add_years = div(value, 12)
add_months = rem(value, 12)
{year, month} =
if month + add_months < 1 do
total_months = month + add_months
{year + (add_years - 1), 12 + total_months}
else
{year + add_years, month + add_months}
end
if year < 0 do
{:error, :shift_to_invalid_date}
else
ldom = :calendar.last_day_of_the_month(year, month)
cond do
day > ldom ->
%DateTime{datetime | year: year, month: month, day: ldom}
:else ->
%DateTime{datetime | year: year, month: month}
end
end
end
defp shift_by(datetime, value, :weeks),
do: shift_by(datetime, value * 7, :days)
defp shift_by(%DateTime{} = datetime, 0, :days),
do: datetime
# Positive shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :days)
when value > 0 do
ldom = :calendar.last_day_of_the_month(year, month)
cond do
day + value <= ldom ->
%DateTime{datetime | day: day + value}
month + 1 <= 12 ->
diff = ldom - day + 1
shift_by(%DateTime{datetime | month: month + 1, day: 1}, value - diff, :days)
:else ->
diff = ldom - day + 1
shift_by(%DateTime{datetime | year: year + 1, month: 1, day: 1}, value - diff, :days)
end
end
# Negative shifts
defp shift_by(%DateTime{year: year, month: month, day: day} = datetime, value, :days) do
cond do
day + value >= 1 ->
%DateTime{datetime | day: day + value}
month - 1 >= 1 ->
ldom = :calendar.last_day_of_the_month(year, month - 1)
shift_by(%DateTime{datetime | month: month - 1, day: ldom}, value + day, :days)
year == 0 ->
{:error, :shift_to_invalid_date}
:else ->
ldom = :calendar.last_day_of_the_month(year - 1, 12)
shift_by(%DateTime{datetime | year: year - 1, month: 12, day: ldom}, value + day, :days)
end
end
end
|
lib/datetime/datetime.ex
| 0.878203
| 0.550426
|
datetime.ex
|
starcoder
|
defmodule Hive.Distribution.LabledRing do
@moduledoc """
A quorum is the minimum number of nodes that a distributed cluster has to
obtain in order to be allowed to perform an operation. This can be used to
enforce consistent operation in a distributed system.
## Quorum size
You must configure this distribution strategy and specify its minimum quorum
size:
config :swarm,
distribution_strategy: Swarm.Distribution.StaticQuorumRing,
static_quorum_size: 5
It defines the minimum number of nodes that must be connected in the cluster
to allow process registration and distribution.
If there are fewer nodes currently available than the quorum size, any calls
to `Swarm.register_name/5` will return `{:error, :no_node_available}` until
enough nodes have started.
You can configure the `:kernel` application to wait for cluster formation
before starting your application during node start up. The
`sync_nodes_optional` configuration specifies which nodes to attempt to
connect to within the `sync_nodes_timeout` window, defined in milliseconds,
before continuing with startup. There is also a `sync_nodes_mandatory` setting
which can be used to enforce all nodes are connected within the timeout window
or else the node terminates.
config :kernel,
sync_nodes_optional: [:"node1@192.168.1.1", :"node2@192.168.1.2"],
sync_nodes_timeout: 60_000
The `sync_nodes_timeout` can be configured as `:infinity` to wait indefinitely
for all nodes to connect. All involved nodes must have the same value for
`sync_nodes_timeout`.
### Example
In a 9 node cluster you would configure the `:static_quorum_size` as 5. During
a network split of 4 and 5 nodes, processes on the side with 5 nodes
will continue running, whereas processes on the other 4 nodes will be stopped.
Be aware that in the running 5 node cluster, no more failures can be handled
because the remaining cluster size would be less than the required 5 node
minimum. All running processes would be stopped in the case of another single
node failure.
"""
use Swarm.Distribution.Strategy
alias Hive.Distribution.LabledRing
defstruct [:nodes]
def create do
IO.inspect("create")
%LabledRing{
nodes: []
}
end
def add_node(quorum, node) do
IO.inspect("add_node/2 #1")
role = :rpc.call(node, System, :get_env, ["ROLE"])
info = %{node: node, role: role}
IO.inspect(quorum, label: "quorum")
IO.inspect(node, label: "node")
%LabledRing{
nodes: [info | quorum.nodes]
}
end
def add_node(quorum, _node, _weight) do
IO.inspect("add_node/2 #2")
%LabledRing{
nodes: quorum.nodes
}
end
def add_nodes(quorum, nodes) do
IO.inspect("add_nodes/2")
IO.inspect(quorum, label: "quorum")
IO.inspect(nodes, label: "nodes")
%LabledRing{
nodes: quorum.nodes
}
end
def remove_node(quorum, node) do
IO.inspect("remove_node/2")
IO.inspect(node, label: "remove_node")
%LabledRing{
      nodes: Enum.reject(quorum.nodes, & &1.node == node)
}
end
@doc """
Maps a key to a specific node via the current distribution strategy.
  If no node matching the key's role is available, it returns `:undefined`.
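  The `map_key` is expected to be a map with a `:role` field; only nodes whose
  `ROLE` environment variable (captured in `add_node/2`) matches that role are
  candidates.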
"""
def key_to_node(%LabledRing{nodes: nodes}, map_key) do
# case length(ring.nodes) do
# node_count when node_count < static_quorum_size -> :undefined
# _ -> HashRing.key_to_node(ring, key)
# end
avail_nodes =
nodes
|> Enum.filter(& &1.role == map_key.role)
IO.inspect("key_to_node/2")
IO.inspect(nodes, label: "nodes")
IO.inspect(map_key)
IO.inspect(avail_nodes)
case Enum.count(avail_nodes) > 0 do
false -> :undefined
true ->
%{node: node} = Enum.at(avail_nodes, 0)
node
end
end
# defp static_quorum_size() do
# Application.get_env(:swarm, :static_quorum_size, 2)
# |> static_quorum_size()
# end
# defp static_quorum_size(nil), do: static_quorum_size(2)
# defp static_quorum_size(binary) when is_binary(binary) do
# binary
# |> Integer.parse()
# |> convert_to_integer()
# |> static_quorum_size()
# end
# defp static_quorum_size(size) when is_integer(size) and size > 0, do: size
# defp static_quorum_size(_size),
# do: raise("config :static_quorum_size should be a positive integer")
# defp convert_to_integer({integer, _}) when is_integer(integer), do: integer
# defp convert_to_integer(other), do: other
end
|
lib/hive/role_strategy.ex
| 0.857798
| 0.67553
|
role_strategy.ex
|
starcoder
|
defmodule AWS.SSOOIDC do
@moduledoc """
AWS Single Sign-On (SSO) OpenID Connect (OIDC) is a web service that enables a
client (such as AWS CLI or a native application) to register with AWS SSO.
The service also enables the client to fetch the user’s access token upon
successful authentication and authorization with AWS SSO. This service conforms
with the OAuth 2.0 based implementation of the device authorization grant
standard
([https://tools.ietf.org/html/rfc8628](https://tools.ietf.org/html/rfc8628)). For general information about AWS SSO, see [What is AWS Single
Sign-On?](https://docs.aws.amazon.com/singlesignon/latest/userguide/what-is.html)
in the *AWS SSO User Guide*.
This API reference guide describes the AWS SSO OIDC operations that you can call
programmatically and includes detailed information on data types and errors.
AWS provides SDKs that consist of libraries and sample code for various
programming languages and platforms such as Java, Ruby, .Net, iOS, and Android.
The SDKs provide a convenient way to create programmatic access to AWS SSO and
other AWS services. For more information about the AWS SDKs, including how to
download and install them, see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
"""
@doc """
Creates and returns an access token for the authorized client.
The access token issued will be used to fetch short-term credentials for the
assigned roles in the AWS account.
"""
def create_token(client, input, options \\ []) do
path_ = "/token"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Registers a client with AWS SSO.
This allows clients to initiate device authorization. The output should be
persisted for reuse through many authentication requests.
"""
def register_client(client, input, options \\ []) do
path_ = "/client/register"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Initiates device authorization by requesting a pair of verification codes from
the authorization service.
"""
def start_device_authorization(client, input, options \\ []) do
path_ = "/device_authorization"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
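  # Illustrative device-authorization flow (a sketch; the string field names
  # follow the AWS SSO OIDC API, and `client` is assumed to be an `AWS.Client`):
  #
  #     {:ok, reg, _resp} =
  #       AWS.SSOOIDC.register_client(client, %{
  #         "clientName" => "my-cli",
  #         "clientType" => "public"
  #       })
  #
  #     {:ok, auth, _resp} =
  #       AWS.SSOOIDC.start_device_authorization(client, %{
  #         "clientId" => reg["clientId"],
  #         "clientSecret" => reg["clientSecret"],
  #         "startUrl" => "https://my-sso-portal.awsapps.com/start"
  #       })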
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "awsssooidc"}
host = build_host("oidc", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/ssooidc.ex
| 0.733165
| 0.465205
|
ssooidc.ex
|
starcoder
|
defmodule Tensor.Tensor.Inspect do
alias Tensor.{Tensor}
def inspect(tensor, _opts) do
"""
#Tensor<(#{dimension_string(tensor)})
#{inspect_tensor_contents(tensor)}
>
"""
end
def dimension_string(tensor) do
tensor.dimensions |> Enum.join("×")
end
  # A bodiless head carries the default so both clauses share arity 2; defining
  # an arity-1 clause alongside an arity-2 clause with a default would not compile.
  defp inspect_tensor_contents(tensor, is \\ [])

  defp inspect_tensor_contents(tensor = %Tensor{dimensions: dimensions}, _is) when length(dimensions) == 3 do
[_, deepness | _] = Tensor.dimensions(tensor)
tensor
|> Tensor.to_list
|> Enum.map(fn slice ->
slice
|> Enum.with_index
|> Enum.map(fn {row, index} ->
rowstr =
row
|> Enum.map(fn elem ->
elem
|> inspect
|> String.pad_leading(8)
end)
|> Enum.join(",")
"#{String.pad_leading("", 2 * index)}#{color(deepness, rem(index, deepness))}#{rowstr}#{IO.ANSI.reset}"
end)
|> Enum.join("\n")
end)
|> Enum.join(slice_join_str(deepness))
end
  defp inspect_tensor_contents(tensor, is) do
tensor
|> Tensor.slices
|> Enum.with_index
|> Enum.map(fn {slice, i} ->
if Tensor.order(slice) <= 3 do
"""
#{inspect(:lists.reverse([i|is]))}
#{inspect_tensor_contents(slice)}
"""
else
inspect_tensor_contents(slice, [i|is])
end
end)
|> Enum.join("\n\n\n")
end
defp color(deepness, depth) when deepness <= 3, do: [[IO.ANSI.bright, IO.ANSI.white], [IO.ANSI.white], [IO.ANSI.bright, IO.ANSI.black]] |> Enum.fetch!(depth)
defp color(deepness, depth) when deepness <= 5, do: [[IO.ANSI.bright, IO.ANSI.white], [IO.ANSI.white], [IO.ANSI.bright, IO.ANSI.blue], [IO.ANSI.blue], [IO.ANSI.bright, IO.ANSI.black]] |> Enum.fetch!(depth)
defp color(deepness, depth) when deepness <= 6, do: [[IO.ANSI.bright, IO.ANSI.white], [IO.ANSI.white], [IO.ANSI.yellow], [IO.ANSI.bright, IO.ANSI.blue], [IO.ANSI.blue], [IO.ANSI.bright, IO.ANSI.black]] |> Enum.fetch!(depth)
defp color(_deepness, _depth), do: [IO.ANSI.white]
defp slice_join_str(deepness) when deepness < 4, do: "\n"
defp slice_join_str(_deepness), do: "\n\n"
end
|
lib/tensor/tensor/inspect.ex
| 0.637369
| 0.642531
|
inspect.ex
|
starcoder
|
defmodule ChatApi.SlackAuthorizations do
@moduledoc """
The SlackAuthorizations context.
"""
import Ecto.Query, warn: false
alias ChatApi.Repo
alias ChatApi.SlackAuthorizations.SlackAuthorization
@doc """
Returns the list of slack_authorizations.
## Examples
iex> list_slack_authorizations()
[%SlackAuthorization{}, ...]
"""
def list_slack_authorizations do
Repo.all(SlackAuthorization)
end
@doc """
Gets a single slack_authorization.
Raises `Ecto.NoResultsError` if the Slack authorization does not exist.
## Examples
iex> get_slack_authorization!(123)
%SlackAuthorization{}
iex> get_slack_authorization!(456)
** (Ecto.NoResultsError)
"""
def get_slack_authorization!(id), do: Repo.get!(SlackAuthorization, id)
def get_authorization_by_account(account_id) do
SlackAuthorization
|> where(account_id: ^account_id)
|> order_by(desc: :inserted_at)
|> Repo.one()
end
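  # Upserts an authorization: the most recent record for the account (by
  # `inserted_at`) is updated when present; otherwise a new one is inserted.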
def create_or_update(account_id, params) do
existing = get_authorization_by_account(account_id)
if existing do
update_slack_authorization(existing, params)
else
create_slack_authorization(params)
end
end
@doc """
Creates a slack_authorization.
## Examples
iex> create_slack_authorization(%{field: value})
{:ok, %SlackAuthorization{}}
iex> create_slack_authorization(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_slack_authorization(attrs \\ %{}) do
%SlackAuthorization{}
|> SlackAuthorization.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a slack_authorization.
## Examples
iex> update_slack_authorization(slack_authorization, %{field: new_value})
{:ok, %SlackAuthorization{}}
iex> update_slack_authorization(slack_authorization, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_slack_authorization(%SlackAuthorization{} = slack_authorization, attrs) do
slack_authorization
|> SlackAuthorization.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a slack_authorization.
## Examples
iex> delete_slack_authorization(slack_authorization)
{:ok, %SlackAuthorization{}}
iex> delete_slack_authorization(slack_authorization)
{:error, %Ecto.Changeset{}}
"""
def delete_slack_authorization(%SlackAuthorization{} = slack_authorization) do
Repo.delete(slack_authorization)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking slack_authorization changes.
## Examples
iex> change_slack_authorization(slack_authorization)
%Ecto.Changeset{data: %SlackAuthorization{}}
"""
def change_slack_authorization(%SlackAuthorization{} = slack_authorization, attrs \\ %{}) do
SlackAuthorization.changeset(slack_authorization, attrs)
end
end
|
lib/chat_api/slack_authorizations.ex
| 0.81309
| 0.437523
|
slack_authorizations.ex
|
starcoder
|
defmodule Membrane.Element.Base.Filter do
@moduledoc """
Module defining behaviour for filters - elements processing data.
Behaviours for filters are specified, besides this place, in modules
`Membrane.Element.Base.Mixin.CommonBehaviour`,
`Membrane.Element.Base.Mixin.SourceBehaviour`,
and `Membrane.Element.Base.Mixin.SinkBehaviour`.
  Filters can have both input and output pads. The job of a typical filter is to
  receive some data on an input pad, process the data, and send it through the
  output pad. If these pads work in pull mode, which is the most common case,
then filter is also responsible for receiving demands on the output pad and
requesting them on the input pad (for more details, see
`c:Membrane.Element.Base.Mixin.SourceBehaviour.handle_demand/5` callback).
Filters, like all elements, can of course have multiple pads if needed to
provide more complex solutions.
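
  For illustration, a minimal passthrough filter might look like the sketch
  below (pad definitions and demand handling are omitted, and an `:output` pad
  is assumed):

  ```elixir
  defmodule MyPassthrough do
    use Membrane.Element.Base.Filter

    @impl true
    def handle_process(_pad, buffer, _context, state) do
      # Re-emit each incoming buffer on the output pad unchanged
      {{:ok, buffer: {:output, buffer}}, state}
    end
  end
  ```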
"""
alias Membrane.{Buffer, Element}
alias Element.Base.Mixin
alias Element.{CallbackContext, Pad}
@doc """
  Callback invoked to process buffers.
For pads in pull mode it is called when buffers have been demanded (by returning
`:demand` action from any callback).
For pads in push mode it is invoked when buffers arrive.
"""
@callback handle_process_list(
pad :: Pad.ref_t(),
buffers :: list(Buffer.t()),
context :: CallbackContext.Process.t(),
state :: Element.state_t()
) :: Mixin.CommonBehaviour.callback_return_t()
@doc """
  Callback invoked to process buffers. In contrast to `c:handle_process_list/4`, it is
passed only a single buffer.
Called by default implementation of `c:handle_process_list/4`.
"""
@callback handle_process(
pad :: Pad.ref_t(),
buffer :: Buffer.t(),
context :: CallbackContext.Process.t(),
state :: Element.state_t()
) :: Mixin.CommonBehaviour.callback_return_t()
defmacro __using__(_) do
quote location: :keep do
use Mixin.CommonBehaviour
use Mixin.SourceBehaviour
use Mixin.SinkBehaviour
@behaviour unquote(__MODULE__)
@impl true
def membrane_element_type, do: :filter
@impl true
def handle_caps(_pad, caps, _context, state), do: {{:ok, forward: caps}, state}
@impl true
def handle_event(_pad, event, _context, state), do: {{:ok, forward: event}, state}
@impl true
def handle_demand(_pad, _size, _unit, _context, state),
do: {{:error, :handle_demand_not_implemented}, state}
@impl true
def handle_process(_pad, _buffer, _context, state),
do: {{:error, :handle_process_not_implemented}, state}
@impl true
def handle_process_list(pad, buffers, context, state) do
args_list = buffers |> Enum.map(&[pad, &1, context])
{{:ok, split: {:handle_process, args_list}}, state}
end
defoverridable handle_caps: 4,
handle_event: 4,
handle_demand: 5,
handle_process_list: 4,
handle_process: 4
end
end
end
|
lib/membrane/element/base/filter.ex
| 0.887491
| 0.45641
|
filter.ex
|
starcoder
|
defmodule Imgproxy do
@moduledoc """
`Imgproxy` generates urls for use with an [imgproxy](https://imgproxy.net) server.
"""
defstruct source_url: nil, options: [], extension: nil, prefix: nil, key: nil, salt: nil
alias __MODULE__
@type t :: %__MODULE__{
source_url: nil | String.t(),
options: keyword(list()),
extension: nil | String.t(),
prefix: nil | String.t(),
key: nil | String.t(),
salt: nil | String.t()
}
@typedoc """
A number of pixels to be used as a dimension.
"""
@type dimension :: float() | integer() | String.t()
@typedoc """
Provide type and enlarge configuration arguments to a resize option.
"""
@type resize_opts :: [
type: String.t(),
enlarge: boolean()
]
@doc """
Generate a new `t:Imgproxy.t/0` struct for the given image source URL.
"""
@spec new(String.t()) :: t()
def new(source_url) when is_binary(source_url) do
%Imgproxy{
source_url: source_url,
prefix: Application.get_env(:imgproxy, :prefix),
key: Application.get_env(:imgproxy, :key),
salt: Application.get_env(:imgproxy, :salt)
}
end
@doc """
Add a [formatting option](https://docs.imgproxy.net/generating_the_url_advanced) to the `t:Imgproxy.t/0`.
For instance, to add the [padding](https://docs.imgproxy.net/generating_the_url_advanced?id=padding) option
with a 10px padding on all sides, you can use:
iex> img = Imgproxy.new("http://example.com/image.jpg")
iex> Imgproxy.add_option(img, :padding, [10, 10, 10, 10]) |> to_string()
"https://imgcdn.example.com/insecure/padding:10:10:10:10/aHR0cDovL<KEY>"
"""
@spec add_option(t(), atom(), list()) :: t()
def add_option(%Imgproxy{options: opts} = img, name, args)
when is_atom(name) and is_list(args) do
%Imgproxy{img | options: Keyword.put(opts, name, args)}
end
@doc """
Set the [gravity](https://docs.imgproxy.net/generating_the_url_advanced?id=gravity) option.
"""
@spec set_gravity(t(), atom(), dimension(), dimension()) :: t()
def set_gravity(img, type, xoffset \\ 0, yoffset \\ 0)
def set_gravity(img, "sm", _xoffset, _yoffset) do
add_option(img, :g, [:sm])
end
def set_gravity(img, :sm, _xoffset, _yoffset) do
add_option(img, :g, [:sm])
end
def set_gravity(img, type, xoffset, yoffset) do
add_option(img, :g, [type, xoffset, yoffset])
end
@doc """
[Resize](https://docs.imgproxy.net/generating_the_url_advanced?id=resize) an image to the given width and height.
Options include:
* type: "fit" (default), "fill", or "auto"
* enlarge: enlarge if necessary (`false` by default)
"""
@spec resize(t(), dimension(), dimension(), resize_opts()) :: t()
def resize(img, width, height, opts \\ []) do
type = Keyword.get(opts, :type, "fit")
enlarge = Keyword.get(opts, :enlarge, false)
add_option(img, :rs, [type, width, height, enlarge])
end
@doc """
[Crop](https://docs.imgproxy.net/generating_the_url_advanced?id=crop) an image to the given width and height.
Accepts an optional [gravity](https://docs.imgproxy.net/generating_the_url_advanced?id=gravity) parameter, by
default it is "ce:0:0" for center gravity with no offset.
"""
@spec crop(t(), dimension(), dimension(), String.t()) :: t()
def crop(img, width, height, gravity \\ "ce:0:0") do
add_option(img, :c, [width, height, gravity])
end
@doc """
Set the file extension (which will produce an image of that type).
For instance, setting the extension to "png" will result in a PNG being created:
iex> img = Imgproxy.new("http://example.com/image.jpg")
iex> Imgproxy.set_extension(img, "png") |> to_string()
"https://imgcdn.example.com/insecure/aHR0cDovL2V4YW1wbGUuY29tL2ltYWdlLmpwZw.png"
"""
@spec set_extension(t(), String.t()) :: t()
def set_extension(img, "." <> extension), do: set_extension(img, extension)
def set_extension(img, extension), do: %Imgproxy{img | extension: extension}
@doc """
Generate an imgproxy URL.
## Example
iex> Imgproxy.to_string(Imgproxy.new("https://placekitten.com/200/300"))
"https://imgcdn.example.com/insecure/aHR0cHM6Ly9wbGFjZWtpdHRlbi5jb20vMjAwLzMwMA"
"""
@spec to_string(t()) :: String.t()
defdelegate to_string(img), to: String.Chars.Imgproxy
end
defimpl String.Chars, for: Imgproxy do
def to_string(%Imgproxy{prefix: prefix, key: key, salt: salt} = img) do
path = build_path(img)
signature = gen_signature(path, key, salt)
Path.join([prefix || "", signature, path])
end
@spec build_path(Imgproxy.t()) :: String.t()
defp build_path(%Imgproxy{source_url: source_url, options: opts, extension: ext}) do
["/" | Enum.map(opts, &option_to_string/1)]
|> Path.join()
|> Path.join(encode_source_url(source_url, ext))
end
defp encode_source_url(source_url, nil) do
Base.url_encode64(source_url, padding: false)
end
defp encode_source_url(source_url, extension) do
encode_source_url(source_url, nil) <> "." <> extension
end
defp option_to_string({name, args}) when is_list(args) do
Enum.map_join([name | args], ":", &Kernel.to_string/1)
end
defp gen_signature(path, key, salt) when is_binary(key) and is_binary(salt) do
decoded_key = Base.decode16!(key, case: :lower)
decoded_salt = Base.decode16!(salt, case: :lower)
:hmac
|> :crypto.mac(:sha256, decoded_key, decoded_salt <> path)
|> Base.url_encode64(padding: false)
end
defp gen_signature(_path, _key, _salt), do: "insecure"
end
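# A quick usage sketch of the pieces above. Assuming the same :imgproxy config
# as in the doctests (prefix "https://imgcdn.example.com", no key/salt, so the
# signature falls back to "insecure"):
#
#     "http://example.com/image.jpg"
#     |> Imgproxy.new()
#     |> Imgproxy.resize(300, 200, type: "fill")
#     |> Imgproxy.set_extension("webp")
#     |> to_string()
#     # => "https://imgcdn.example.com/insecure/rs:fill:300:200:false/aHR0cDovL2V4YW1wbGUuY29tL2ltYWdlLmpwZw.webp"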
|
lib/imgproxy.ex
| 0.846149
| 0.489137
|
imgproxy.ex
|
starcoder
|
defmodule Abacus.Runtime.Scope do
@moduledoc """
Contains helper functions to work with the scope of an Abacus script at runtime
"""
@doc """
Tries to get values from a subject.
Subjects can be:
* maps
* keyword lists
* lists (with integer keys)
* nil (special case so access to undefined variables will not crash execution)
Keys can be:
* strings
* atoms (you can pass a resolve map that converts the given atom key into a string key)
* positive integers (for lists only)
The Abacus parser will reference this function extensively like this:
`a.b[2]` will turn into
```
get_in(get_in(get_in(scope, :var1, var_lookup), :var2, var_lookup), 2, var_lookup)
```
in this case, `var_lookup` will be:
```
%{
var1: "a",
var2: "b"
}
```
"""
@type key :: String.t | atom
@type index :: integer
@spec get_in(nil, key | index, map) :: nil
@spec get_in(map, key, map) :: nil | term
@spec get_in(list, index, map) :: nil | term
@spec get_in(list, key, map) :: nil | term
def get_in(subject, key_or_index, var_lookup \\ %{})
def get_in(nil, _, _), do: nil
def get_in(subject, key, lookup) when is_atom(key) do
key = Map.get(lookup, key, to_string(key))
get_in(subject, key, lookup)
end
def get_in(subject, key, _) when (is_map(subject) or is_list(subject)) and is_binary(key) do
Enum.reduce_while(subject, nil, fn {k, v}, _ ->
case to_string(k) do
^key -> {:halt, v}
_ -> {:cont, nil}
end
end)
end
def get_in(list, index, _) when is_list(list) and is_integer(index) and index >= 0 do
Enum.at(list, index, nil)
end
@doc """
Renames first-level variables into their respective `:"var\#{x}"` atoms
"""
def prepare_scope(scope, lookup) when is_map(scope) or is_list(scope) do
scope = Map.merge(default_scope(), Enum.into(scope, %{}))
Enum.map(scope, fn
{k, v} ->
case Map.get(lookup, to_string(k)) do
nil ->
# will not be used anyway
{k, v}
varname ->
{varname, v}
end
v ->
v
end)
|> Enum.filter(fn
{bin, _} when is_binary(bin) -> false
_ -> true
end)
end
def prepare_scope(primitive, _), do: primitive
@math_funs_1 ~w[
acos
acosh
asin
asinh
atan
atanh
ceil
cos
cosh
exp
floor
log
log10
log2
sin
sinh
sqrt
tan
tanh
]a
@doc """
Returns a scope with some default functions and constants
"""
def default_scope do
constants = %{
PI: :math.pi(),
round: &apply(Float, :round, [&1, &2])
}
Enum.map(@math_funs_1, fn fun ->
{fun, &apply(:math, fun, [&1])}
end)
|> Enum.into(%{})
|> Map.merge(constants)
end
end
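# A hand-run sketch of the lookup mechanics documented above; the scope and
# lookup shapes are illustrative, mirroring what the Abacus compiler passes in:
#
#     scope = %{"a" => %{"b" => [10, 20, 30]}}
#     lookup = %{var1: "a", var2: "b"}
#
#     scope
#     |> Abacus.Runtime.Scope.get_in(:var1, lookup)
#     |> Abacus.Runtime.Scope.get_in(:var2, lookup)
#     |> Abacus.Runtime.Scope.get_in(2)
#     # => 30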
|
lib/runtime/scope.ex
| 0.884155
| 0.816955
|
scope.ex
|
starcoder
|
defmodule SvgBuilder.Shape do
import XmlBuilder
alias SvgBuilder.{Element, Units}
@moduledoc """
This module enables creation of basic shapes.
"""
@doc """
Create a rectangle element.
x: The x-axis coordinate of the side of the rectangle which has the smaller
x-axis coordinate value in the current user coordinate system.
y: The y-axis coordinate of the side of the rectangle which has the smaller
y-axis coordinate value in the current user coordinate system.
width: Width of the rectangle.
height: Height of the rectangle.
children: List of child elements.
"""
@spec rect(Units.len_t(), Units.len_t(), Units.len_t(), Units.len_t(), [Element.t()]) ::
Element.t()
def rect(x, y, width, height, children \\ []) do
element(
:rect,
%{x: Units.len(x), y: Units.len(y), width: Units.len(width), height: Units.len(height)},
children
)
end
@doc """
Create a rounded rectangle element.
x: The x-axis coordinate of the side of the rectangle which has the smaller
x-axis coordinate value in the current user coordinate system.
y: The y-axis coordinate of the side of the rectangle which has the smaller
y-axis coordinate value in the current user coordinate system.
width: Width of the rectangle.
height: Height of the rectangle.
rx : The x-axis radius of the ellipse used to round off the corners of the rectangle.
ry: The y-axis radius of the ellipse used to round off the corners of the rectangle.
children: List of child elements.
"""
@spec rounded_rect(
Units.len_t(),
Units.len_t(),
Units.len_t(),
Units.len_t(),
Units.len_t(),
Units.len_t(),
[Element.t()]
) :: Element.t()
def rounded_rect(x, y, width, height, rx, ry, children \\ []) do
element(
:rect,
%{
x: Units.len(x),
y: Units.len(y),
width: Units.len(width),
height: Units.len(height),
rx: Units.len(rx),
ry: Units.len(ry)
},
children
)
end
@doc """
Create a circle element.
cx: The x-axis coordinate of the center of the circle.
cy: The y-axis coordinate of the center of the circle.
r: Radius of the circle.
children: List of child elements.
"""
@spec circle(Units.len_t(), Units.len_t(), Units.len_t(), [Element.t()]) :: Element.t()
def circle(cx, cy, r, children \\ []) do
element(:circle, %{cx: Units.len(cx), cy: Units.len(cy), r: Units.len(r)}, children)
end
@doc """
Create an ellipse element.
cx: The x-axis coordinate of the center of the ellipse
cy: The y-axis coordinate of the center of the ellipse
rx: x-axis radius of the ellipse.
ry: y-axis radius of the ellipse.
children: List of child elements.
"""
@spec ellipse(Units.len_t(), Units.len_t(), Units.len_t(), Units.len_t(), [Element.t()]) ::
Element.t()
def ellipse(cx, cy, rx, ry, children \\ []) do
element(
:ellipse,
%{cx: Units.len(cx), cy: Units.len(cy), rx: Units.len(rx), ry: Units.len(ry)},
children
)
end
@doc """
Create a line element.
x1, y1 : coordinates of the start of the line.
x2, y2 : coordinates of the end of the line.
"""
@spec line(Units.len_t(), Units.len_t(), Units.len_t(), Units.len_t(), [Element.t()]) ::
Element.t()
def line(x1, y1, x2, y2, children \\ []) do
element(
:line,
%{x1: Units.len(x1), y1: Units.len(y1), x2: Units.len(x2), y2: Units.len(y2)},
children
)
end
@doc """
Create a polyline element.
points : a list of x, y coordinates as tuples.
## Example
iex> SvgBuilder.Shape.polyline([{0,0},{2,3},{3,4},{6,6}])
{:polyline, %{points: "0, 0, 2, 3, 3, 4, 6, 6"}, []}
"""
@spec polyline([{Units.len_t(), Units.len_t()}], [Element.t()]) :: Element.t()
def polyline(points, children \\ []) do
element(:polyline, %{points: points_to_string(points, "")}, children)
end
@doc """
Create a polygon element.
points : a list of x, y coordinates as tuples.
## Example
iex> SvgBuilder.Shape.polygon([{0,0},{2,3},{3,4},{6,6}])
{:polygon, %{points: "0, 0, 2, 3, 3, 4, 6, 6"}, []}
"""
@spec polygon([{Units.len_t(), Units.len_t()}], [Element.t()]) :: Element.t()
def polygon(points, children \\ []) do
element(:polygon, %{points: points_to_string(points, "")}, children)
end
defp points_to_string([{x, y}], acc) do
String.trim("#{acc} #{Units.len(x)}, #{Units.len(y)}")
end
defp points_to_string([{x, y} | points], acc) do
points_to_string(points, "#{acc} #{Units.len(x)}, #{Units.len(y)},")
end
end
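# A small sketch of the element tuples these helpers return, in the same style
# as the doctests above (numeric attribute values pass through Units.len/1):
#
#     iex> SvgBuilder.Shape.polygon([{0, 0}, {10, 0}, {10, 10}])
#     {:polygon, %{points: "0, 0, 10, 0, 10, 10"}, []}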
|
lib/shape.ex
| 0.934253
| 0.829216
|
shape.ex
|
starcoder
|
defmodule Riptide.Store.Composite do
@moduledoc """
This module provides a macro to define a store that splits the data tree across various other stores. It works by pattern matching on the paths being written or read and choosing which store to route them to.
## Usage
```elixir
defmodule Todolist.Store do
use Riptide.Store.Composite
@memory {Riptide.Store.Memory, []}
@local {Riptide.Store.LMDB, directory: "data"}
@shared {Riptide.Store.Postgres, []}
def stores(), do: [
@memory,
@local,
@shared,
]
# Any path starting with ["shared"] is saved in a shared postgres instance
def which_store(["shared" | _rest]), do: @shared
# Any path starting with ["tmp"] is kept only in memory
def which_store(["tmp" | _rest]), do: @memory
# Default catch-all
def which_store(_), do: @local
end
```
## Configuration
```elixir
config :riptide,
store: %{
read: {Todolist.Store, []},
write: {Todolist.Store, []},
}
```
"""
@doc """
List of stores to initialize that are used by this module.
"""
@callback stores() :: any
@doc """
For a given path, return which store to use. Take advantage of pattern matching to specify broad areas.
"""
@callback which_store(path :: any()) :: {atom(), any()}
defmacro __using__(_opts) do
quote do
@behaviour Riptide.Store.Composite
@behaviour Riptide.Store
def init(_opts) do
Enum.each(stores(), fn {store, opts} ->
:ok = store.init(opts)
end)
end
def mutation(merges, deletes, _opts) do
groups =
Enum.reduce(merges, %{}, fn merge = {path, _value}, collect ->
store = which_store(path)
path = [store, :merges]
existing = Dynamic.get(collect, path, [])
Dynamic.put(collect, path, [merge | existing])
end)
groups =
Enum.reduce(deletes, groups, fn delete = {path, _value}, collect ->
store = which_store(path)
path = [store, :deletes]
existing = Dynamic.get(collect, path, [])
Dynamic.put(collect, path, [delete | existing])
end)
:ok =
Enum.each(groups, fn {{store, store_opts}, data} ->
merges = Map.get(data, :merges, [])
deletes = Map.get(data, :deletes, [])
store.mutation(merges, deletes, store_opts)
end)
end
def query(layers, _opts) do
groups =
Enum.reduce(layers, %{}, fn layer = {path, _value}, collect ->
store = which_store(path)
existing = Map.get(collect, store, [])
Map.put(collect, store, [layer | existing])
end)
Stream.flat_map(groups, fn {{store, store_opts}, layers} ->
store.query(layers, store_opts)
end)
end
end
end
end
|
packages/elixir/lib/riptide/store/store_composite.ex
| 0.811713
| 0.819424
|
store_composite.ex
|
starcoder
|
defmodule Liquex.Argument do
@moduledoc false
alias Liquex.Context
alias Liquex.Indifferent
@type field_t :: any
@type argument_t ::
{:field, [field_t]}
| {:literal, field_t}
| {:inclusive_range, [begin: field_t, end: field_t]}
@spec eval(argument_t | [argument_t], Context.t()) :: field_t
def eval([argument], context), do: eval(argument, context)
def eval({:field, accesses}, %Context{variables: variables}),
do: do_eval(variables, accesses)
def eval({:literal, literal}, _context), do: literal
def eval({:inclusive_range, [begin: begin_value, end: end_value]}, context),
do: eval(begin_value, context)..eval(end_value, context)
def eval({:keyword, [key, value]}, context), do: {key, eval(value, context)}
defp do_eval(value, []), do: apply_lazy(value, nil)
defp do_eval(nil, _), do: nil
# Special case ".first"
defp do_eval(value, [{:key, "first"} | tail]) when is_list(value) do
value
|> Enum.at(0)
|> apply_lazy(value)
|> do_eval(tail)
end
# Special case ".size"
defp do_eval(value, [{:key, "size"} | tail]) when is_list(value) do
value
|> length()
|> do_eval(tail)
end
defp do_eval(value, [{:key, key} | tail]) do
value
|> Indifferent.get(key)
|> apply_lazy(value)
|> do_eval(tail)
end
defp do_eval(value, [{:accessor, accessor} | tail]) do
value
|> Enum.at(accessor)
|> apply_lazy(value)
|> do_eval(tail)
end
# Apply a lazy function if needed
defp apply_lazy(fun, _parent) when is_function(fun, 0), do: fun.()
defp apply_lazy(fun, parent) when is_function(fun, 1), do: fun.(parent)
defp apply_lazy(value, _), do: value
def assign(context, [argument], value), do: assign(context, argument, value)
def assign(%Context{variables: variables} = context, {:field, accesses}, value),
do: %{context | variables: do_assign(variables, accesses, value)}
defp do_assign(variables, [{:key, key} | tail], value) do
case tail do
[] -> Map.put(variables, key, value)
_ -> Map.update(variables, key, nil, &do_assign(&1, tail, value))
end
end
defp do_assign(variables, [{:accessor, index} | tail], value) do
case tail do
[] ->
List.replace_at(variables, index, value)
_ ->
value =
variables
|> Enum.at(index)
|> do_assign(tail, value)
List.replace_at(variables, index, value)
end
end
defp do_assign(_variables, [], value), do: value
defp do_assign(_, _, _), do: raise(LiquexError, "Could not assign value")
end
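# A hand-run sketch of eval/2; it assumes a bare %Liquex.Context{} with only
# :variables populated is enough here, since the clauses above pattern-match
# just that field:
#
#     ctx = %Liquex.Context{variables: %{"user" => %{"name" => "amy"}}}
#     Liquex.Argument.eval({:field, [key: "user", key: "name"]}, ctx)
#     # => "amy"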
|
lib/liquex/argument.ex
| 0.790288
| 0.526525
|
argument.ex
|
starcoder
|
defmodule AstroEx.Unit.Degrees do
@moduledoc """
Degrees
"""
alias AstroEx.Unit
alias AstroEx.Unit.{Arcmin, Arcsec, DMS, HMS, Radian}
@enforce_keys [:value]
defstruct [:value]
@typep degree :: -360..360 | float()
@type t :: %__MODULE__{value: degree()}
@doc """
Creates a new `AstroEx.Unit.Degrees` struct
## Examples
iex> AstroEx.Unit.Degrees.new(180)
#AstroEx.Unit.Degrees<180.0>
iex> AstroEx.Unit.Degrees.new(180.0)
#AstroEx.Unit.Degrees<180.0>
"""
def new(value) when is_float(value), do: %__MODULE__{value: value}
def new(value) when is_integer(value), do: new(value * 1.0)
@doc """
Converts `AstroEx.Unit.Degrees` to a `AstroEx.Unit.Arcmin`
## Examples
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.Degrees.to_arcmin()
#AstroEx.Unit.Arcmin<10800.0>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcmin)
#AstroEx.Unit.Arcmin<10800.0>
"""
def to_arcmin(%__MODULE__{value: value}),
do: value |> Arcmin.from_degrees()
@doc """
Converts `AstroEx.Unit.Degrees` to a `AstroEx.Unit.Arcsec`
## Examples
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.Degrees.to_arcsec()
#AstroEx.Unit.Arcsec<648000.0>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Arcsec)
#AstroEx.Unit.Arcsec<648000.0>
"""
def to_arcsec(%__MODULE__{value: value}),
do: value |> Arcsec.from_degrees()
@doc """
Converts `AstroEx.Unit.Degrees` to a `AstroEx.Unit.DMS`
## Examples
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.Degrees.to_dms()
#AstroEx.Unit.DMS<180:00:00.0>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.DMS)
#AstroEx.Unit.DMS<180:00:00.0>
"""
def to_dms(%__MODULE__{value: value}),
do: value |> DMS.from_degrees()
@doc """
Converts `AstroEx.Unit.Degrees` to a `AstroEx.Unit.HMS`
## Examples
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.Degrees.to_hms()
#AstroEx.Unit.HMS<12:00:00.0>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.HMS)
#AstroEx.Unit.HMS<12:00:00.0>
"""
def to_hms(%__MODULE__{value: value}),
do: value |> HMS.from_degrees()
@doc """
Converts `AstroEx.Unit.Degrees` to a `AstroEx.Unit.Radian`
## Examples
iex> degrees = AstroEx.Unit.Degrees.new(180)
iex> AstroEx.Unit.Degrees.to_radian(degrees)
#AstroEx.Unit.Radian<3.141593>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.Degrees.to_radian()
#AstroEx.Unit.Radian<3.141593>
iex> 180
...> |> AstroEx.Unit.Degrees.new()
...> |> AstroEx.Unit.cast(AstroEx.Unit.Radian)
#AstroEx.Unit.Radian<3.141593>
"""
def to_radian(degrees), do: Radian.from_degrees(degrees)
@doc """
Returns the `AstroEx.Unit.Degrees` or converts a `Integer`/`Float`
to a `AstroEx.Unit.Degrees`
## Examples
iex> degrees = AstroEx.Unit.Degrees.new(15)
iex> AstroEx.Unit.Degrees.from_degrees(degrees)
#AstroEx.Unit.Degrees<15.0>
iex> AstroEx.Unit.Degrees.from_degrees(15)
#AstroEx.Unit.Degrees<15.0>
iex> AstroEx.Unit.Degrees.from_degrees(180.0)
#AstroEx.Unit.Degrees<180.0>
"""
def from_degrees(%__MODULE__{} = deg), do: deg
def from_degrees(val) when is_integer(val) or is_float(val), do: new(val)
defimpl AstroEx.Unit, for: __MODULE__ do
alias AstroEx.Unit.{Arcmin, Arcsec, Degrees, DMS, HMS, Radian}
def cast(%Degrees{} = degrees, Arcmin), do: Degrees.to_arcmin(degrees)
def cast(%Degrees{} = degrees, Arcsec), do: Degrees.to_arcsec(degrees)
def cast(%Degrees{} = degrees, Degrees), do: degrees
def cast(%Degrees{} = degrees, DMS), do: Degrees.to_dms(degrees)
def cast(%Degrees{} = degrees, HMS), do: Degrees.to_hms(degrees)
def cast(%Degrees{} = degrees, Radian), do: Degrees.to_radian(degrees)
def cast(%{value: value}, Float), do: value
def cast(%{value: value}, Integer), do: trunc(value)
def to_string(%{value: value}) when is_integer(value),
do: Integer.to_string(value)
def to_string(%{value: value}) when is_float(value),
do: :erlang.float_to_binary(value, [:compact, decimals: 6])
def from_degrees(val), do: Degrees.from_degrees(val)
end
defimpl Inspect, for: __MODULE__ do
alias AstroEx.Unit
import Inspect.Algebra
def inspect(value, _opts) do
value = Unit.to_string(value)
concat(["#AstroEx.Unit.Degrees<", value, ">"])
end
end
end
|
lib/astro_ex/unit/degrees.ex
| 0.927248
| 0.540257
|
degrees.ex
|
starcoder
|
defmodule MatrexNumerix.Distance do
@moduledoc """
Distance functions between two vectors.
"""
import Matrex.Guards
import MatrexNumerix.LinearAlgebra
alias MatrexNumerix.{Common, Correlation, Statistics}
@doc """
Mean squared error: the average of the squared differences between two
vectors, i.e. between predicted and actual values.
"""
@spec mse(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def mse(x = %Matrex{}, y = %Matrex{}) do
p = Matrex.pow(x |> Matrex.subtract(y), 2)
Statistics.mean(p)
end
def mse(x = %Matrex{}, y = %Matrex{}, w = %Matrex{}) do
p = Matrex.pow(x |> Matrex.subtract(y), 2)
Statistics.mean(p |> Matrex.dot(w))
end
@doc """
Root mean square error of two vectors, or simply the
square root of mean squared error of the same set of
values. It is a measure of the differences between
predicted and actual values.
"""
@spec rmse(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def rmse(vector1, vector2) do
:math.sqrt(mse(vector1, vector2))
end
def rmse(vector1, vector2, weights) do
:math.sqrt(mse(vector1, vector2, weights))
end
@doc """
The Pearson's distance between two vectors.
"""
@spec pearson(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def pearson(vector1, vector2) do
1.0 - Correlation.pearson(vector1, vector2)
end
@doc """
The Minkowski distance between two vectors.
"""
@spec minkowski(Matrex.t(), Matrex.t(), integer) :: Common.maybe_float()
def minkowski(x = %Matrex{}, y = %Matrex{}, p \\ 3) do
norm(p, x |> Matrex.subtract(y))
end
def minkowski(x = %Matrex{}, y = %Matrex{}, w = %Matrex{}, p) do
norm(p, x |> Matrex.subtract(y) |> Matrex.dot(w))
end
@doc """
The Euclidean distance between two vectors.
"""
@spec euclidean(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def euclidean(x = %Matrex{}, y = %Matrex{}) do
l2_norm(x |> Matrex.subtract(y))
end
def euclidean(x = %Matrex{}, y = %Matrex{}, w = %Matrex{}) do
l2_norm(x |> Matrex.subtract(y), w)
end
@doc """
The squared Euclidean distance between two vectors.
"""
@spec sq_euclidean(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def sq_euclidean(x = %Matrex{}, y = %Matrex{}) do
l2_norm(x |> Matrex.subtract(y)) |> :math.pow(2)
end
def sq_euclidean(x = %Matrex{}, y = %Matrex{}, w = %Matrex{}) do
l2_norm(x |> Matrex.subtract(y), w) |> :math.pow(2)
end
@doc """
The Manhattan distance between two vectors.
"""
@spec manhattan(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def manhattan(x = %Matrex{}, y = %Matrex{}) do
l1_norm(x |> Matrex.subtract(y) )
end
def manhattan(x = %Matrex{}, y = %Matrex{}, w = %Matrex{}) do
l1_norm(x |> Matrex.subtract(y), w)
end
@doc """
The Jaccard distance (1 - Jaccard index) between two vectors.
"""
@spec jaccard(Matrex.t(), Matrex.t()) :: Common.maybe_float()
def jaccard(
matrex_data(rows1, columns1, _data1, _first),
matrex_data(rows2, columns2, _data2, _second)
) when rows1 != rows2 or columns1 != columns2,
do: raise %ArgumentError{message: "incorrect sizes"}
def jaccard(vector1, vector2) do
vector1
|> Stream.zip(vector2)
|> Enum.reduce({0, 0}, fn {x, y}, {intersection, union} ->
case {x, y} do
{x, y} when x == 0 or y == 0 ->
{intersection, union}
{x, y} when x == y ->
{intersection + 1, union + 1}
_ ->
{intersection, union + 1}
end
end)
|> to_jaccard_distance
end
def diff_conv(method, x, y) do
w = Matrex.ones(x |> Matrex.size() |> elem(0), 1)
diff_conv(method, x, y, w)
end
def diff_conv(method,
vector_data(_columns1, _data1, _first) = xx,
vector_data(_columns2, _data2, _second) = yy,
weights) do
{1, nobsx} = Matrex.size(xx)
{1, nobsy} = Matrex.size(yy)
weights = weights || Matrex.ones(1, 1)
{_dimw, nobsw} = Matrex.size(weights)
unless nobsx == nobsy and nobsy == nobsw do
raise ArgumentError, message: "nobs(xx) != nobs(yy) != nobs(weights)"
end
dist_func =
case method do
:euclidean -> &euclidean/3
:sq_euclidean -> &sq_euclidean/3
:manhattan -> &manhattan/3
:mse -> &mse/3
:rmse -> &rmse/3
:minkowski -> &minkowski/3
custom when is_function(custom) -> custom
end
for i <- 1..nobsx, into: [] do
for j <- 1..nobsy, into: [] do
dist_func.(xx |> Matrex.column(i), yy |> Matrex.column(j), weights)
end
end
|> Matrex.new()
end
def diff_conv(method, %Matrex{} = xx, %Matrex{} = yy, weights) do
{dimx, nobsx} = Matrex.size(xx)
{dimy, nobsy} = Matrex.size(yy)
weights = weights || Matrex.ones(dimx, 1)
{dimw, nobsw} = Matrex.size(weights)
unless dimx == dimy and dimy == dimw do
raise ArgumentError, message: "size(xx, 1) != size(yy, 1) != size(weights, 1)"
end
unless nobsx == nobsy and nobsy == nobsw do
raise ArgumentError, message: "nobs(xx) != nobs(yy) != nobs(weights)"
end
for idx <- 1..dimx do
diff_conv(method, xx[idx], yy[idx])
end
end
defp to_jaccard_distance({intersection, union}) do
1 - intersection / union
end
end
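# A quick numeric sketch using Matrex row vectors; a 3-4-5 triangle keeps the
# arithmetic checkable by hand (the norms come from MatrexNumerix.LinearAlgebra):
#
#     x = Matrex.new([[1, 2, 3]])
#     y = Matrex.new([[4, 6, 3]])
#     MatrexNumerix.Distance.euclidean(x, y)  # => 5.0  (sqrt(3^2 + 4^2))
#     MatrexNumerix.Distance.manhattan(x, y)  # => 7.0  (3 + 4 + 0)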
|
lib/distance.ex
| 0.769817
| 0.778986
|
distance.ex
|
starcoder
|
defmodule Estated.Property.MarketAssessment do
@moduledoc "Market assessment information as provided by the assessor."
@moduledoc since: "0.2.0"
# MarketAssessment is very similar to Assessment, but they are separate in Estated and
# documented differently.
# credo:disable-for-this-file Credo.Check.Design.DuplicatedCode
defstruct [
:year,
:land_value,
:improvement_value,
:total_value
]
@typedoc "Tax assessment information as provided by the assessor."
@typedoc since: "0.2.0"
@type t :: %__MODULE__{
year: year() | nil,
land_value: land_value() | nil,
improvement_value: improvement_value() | nil,
total_value: total_value() | nil
}
@typedoc """
The year the market assessment was performed.
Eg. **2017**
"""
@typedoc since: "0.2.0"
@type year :: pos_integer()
@typedoc """
The market land value as determined by the assessor.
Eg. **3000**
"""
@typedoc since: "0.2.0"
@type land_value :: integer()
@typedoc """
The market improvement value as determined by the assessor.
Eg. **12000**
"""
@typedoc since: "0.2.0"
@type improvement_value :: integer()
@typedoc """
The total market value as determined by the assessor.
Eg. **15000**
"""
@typedoc since: "0.2.0"
@type total_value :: integer()
@doc false
@doc since: "0.2.0"
@spec cast_list([map()]) :: [t()]
def cast_list(market_assessments) when is_list(market_assessments) do
Enum.map(market_assessments, &cast/1)
end
@spec cast_list(nil) :: []
def cast_list(nil) do
[]
end
defp cast(%{} = market_assessment) do
Enum.reduce(market_assessment, %__MODULE__{}, &cast_field/2)
end
defp cast_field({"year", year}, acc) do
%__MODULE__{acc | year: year}
end
defp cast_field({"land_value", land_value}, acc) do
%__MODULE__{acc | land_value: land_value}
end
defp cast_field({"improvement_value", improvement_value}, acc) do
%__MODULE__{acc | improvement_value: improvement_value}
end
defp cast_field({"total_value", total_value}, acc) do
%__MODULE__{acc | total_value: total_value}
end
defp cast_field(_map_entry, acc) do
acc
end
end
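# Casting a raw API payload by hand; the string keys follow the cast_field/2
# clauses above, and unknown keys are simply ignored:
#
#     Estated.Property.MarketAssessment.cast_list([
#       %{"year" => 2017, "land_value" => 3000,
#         "improvement_value" => 12000, "total_value" => 15000}
#     ])
#     # => [%Estated.Property.MarketAssessment{year: 2017, land_value: 3000,
#     #      improvement_value: 12000, total_value: 15000}]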
|
lib/estated/property/market_assessment.ex
| 0.822617
| 0.482551
|
market_assessment.ex
|
starcoder
|
defmodule Prometheus.Metric.Summary do
@moduledoc """
Summary metric, to track the size of events.
Example use cases for Summaries:
- Response latency;
- Request size;
- Response size.
Example:
```
defmodule MyProxyInstrumenter do
use Prometheus.Metric
## to be called at app/supervisor startup.
## to tolerate restarts use declare.
def setup() do
Summary.declare([name: :request_size_bytes,
help: "Request size in bytes."])
Summary.declare([name: :response_size_bytes,
help: "Response size in bytes."])
end
def observe_request(size) do
Summary.observe([name: :request_size_bytes], size)
end
def observe_response(size) do
Summary.observe([name: :response_size_bytes], size)
end
end
```
"""
use Prometheus.Erlang, :prometheus_summary
@doc """
Creates a summary using `spec`.
Summary cannot have a label named "quantile".
Raises `Prometheus.MissingMetricSpecKeyError` if required `spec` key is missing.<br>
Raises `Prometheus.InvalidMetricNameError` if metric name is invalid.<br>
Raises `Prometheus.InvalidMetricHelpError` if help is invalid.<br>
Raises `Prometheus.InvalidMetricLabelsError` if labels isn't a list.<br>
Raises `Prometheus.InvalidMetricNameError` if label name is invalid.<br>
Raises `Prometheus.InvalidValueError` exception if duration_unit is unknown or
doesn't match metric name.<br>
Raises `Prometheus.MFAlreadyExistsError` if a summary with the same `spec`
already exists.
"""
delegate new(spec)
@doc """
Creates a summary using `spec`.
Summary cannot have a label named "quantile".
If a summary with the same `spec` exists returns `false`.
Raises `Prometheus.MissingMetricSpecKeyError` if required `spec` key is missing.<br>
Raises `Prometheus.InvalidMetricNameError` if metric name is invalid.<br>
Raises `Prometheus.InvalidMetricHelpError` if help is invalid.<br>
Raises `Prometheus.InvalidMetricLabelsError` if labels isn't a list.<br>
Raises `Prometheus.InvalidMetricNameError` if label name is invalid.<br>
Raises `Prometheus.InvalidValueError` exception if duration_unit is unknown or
doesn't match metric name.
"""
delegate declare(spec)
@doc """
Observes the given amount.
Raises `Prometheus.InvalidValueError` exception if `amount` isn't a number.<br>
Raises `Prometheus.UnknownMetricError` exception if a summary for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric observe(spec, amount \\ 1)
@doc """
Observes the amount of time spent executing `body`.
Raises `Prometheus.UnknownMetricError` exception if a summary for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
Raises `Prometheus.InvalidValueError` exception if `fun` isn't a function or block.
"""
defmacro observe_duration(spec, body) do
env = __CALLER__
Prometheus.Injector.inject(
fn block ->
quote do
start_time = :erlang.monotonic_time()
try do
unquote(block)
after
end_time = :erlang.monotonic_time()
Prometheus.Metric.Summary.observe(unquote(spec), end_time - start_time)
end
end
end,
env,
body
)
end
@doc """
Removes summary series identified by spec.
Raises `Prometheus.UnknownMetricError` exception if a summary for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric remove(spec)
@doc """
Resets the value of the summary identified by `spec`.
Raises `Prometheus.UnknownMetricError` exception if a summary for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric reset(spec)
@doc """
Returns the value of the summary identified by `spec`. If there is no summary for
given labels combination, returns `:undefined`.
If duration unit set, sum will be converted to the duration unit.
[Read more here.](time.html)
Raises `Prometheus.UnknownMetricError` exception if a summary for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric value(spec)
end
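# A usage sketch; the metric name and run_job/0 are illustrative, not part of
# the library. observe_duration/2 is a macro, so the module must be required:
#
#     require Prometheus.Metric.Summary, as: Summary
#     Summary.declare(name: :job_duration_microseconds, help: "Job duration.")
#     Summary.observe_duration([name: :job_duration_microseconds], run_job())
#     Summary.value(name: :job_duration_microseconds)
#     # => {count, sum}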
|
astreu/deps/prometheus_ex/lib/prometheus/metric/summary.ex
| 0.955703
| 0.873377
|
summary.ex
|
starcoder
|
defmodule OMG.Performance.ByzantineEvents do
@moduledoc """
OMG network child chain server byzantine event test entrypoint. Sets up and runs byzantine performance tests.
# Usage
See functions in this module for options available
## start_dos_get_exits runs a test that fetches exit data for 10 given positions using 3 users
```
mix run --no-start -e \
'
OMG.Performance.ByzantineEvents.Generators.stream_utxo_positions() |>
Enum.take(10) |> OMG.Performance.ByzantineEvents.start_dos_get_exits(3)
'
```
__ASSUMPTIONS:__
This test should be run on a testnet filled with transactions. Make sure you have followed the instructions
in `docs/demo_05.md`, that `geth`, `omg_child_chain` and `omg_watcher` are running, and that the watcher is fully synced.
The expected result of running the above command looks like:
```
[
%{span_ms: 232000, corrects_count: 10, errors_count: 0},
%{span_ms: 221500, corrects_count: 10, errors_count: 0},
%{span_ms: 219900, corrects_count: 10, errors_count: 0},
]
```
where the sum of `corrects_count + errors_count` should equal `length(positions)`.
If all passed positions were unspent, there should be no errors.
"""
use OMG.Utils.LoggerExt
alias OMG.Eth
alias OMG.Performance.ByzantineEvents.DoSExitWorker
alias OMG.Performance.HttpRPC.WatcherClient
@type stats_t :: %{
span_ms: non_neg_integer(),
corrects_count: non_neg_integer(),
errors_count: non_neg_integer()
}
@watcher_url Application.get_env(:omg_performance, :watcher_url)
@micros_in_millisecond 1000
@doc """
For the given utxo positions and number of users, starts fetching exit data from the Watcher.
User tasks run asynchronously; each user receives the same list of positions, shuffles its order,
and calls the Watcher for exit data sequentially, one position at a time.
"""
@spec start_dos_get_exits([non_neg_integer()], pos_integer(), binary()) ::
[stats_t()]
def start_dos_get_exits(positions, dos_users, watcher_url \\ @watcher_url) do
1..dos_users
|> Enum.map(fn _ ->
exit_fn = DoSExitWorker.get_exits_fun(positions, watcher_url)
Task.async(exit_fn)
end)
|> Enum.map(&compute_std_exits_statistics/1)
end
@doc """
Fetches utxo positions for a given list of users.
"""
@spec get_exitable_utxos(%{addr: binary()} | [%{addr: binary()}] | binary(), binary()) :: [non_neg_integer()]
def get_exitable_utxos(entities, watcher_url \\ @watcher_url)
def get_exitable_utxos(users, watcher_url) when is_list(users),
do: Enum.map(users, &get_exitable_utxos(&1, watcher_url)) |> Enum.concat()
def get_exitable_utxos(%{addr: addr}, watcher_url) when is_binary(addr),
do: get_exitable_utxos(addr, watcher_url)
def get_exitable_utxos(addr, watcher_url) do
{:ok, utxos} = WatcherClient.get_exitable_utxos(addr, watcher_url)
utxos
end
def watcher_synchronize(watcher_url \\ @watcher_url) do
Eth.WaitFor.repeat_until_ok(fn -> watcher_synchronized?(watcher_url) end)
# NOTE: allowing some more time for the dust to settle on the synced Watcher
# otherwise some of the freshest UTXOs to exit will appear as missing on the Watcher
# related issue to remove this `sleep` and fix properly is https://github.com/omisego/elixir-omg/issues/1031
Process.sleep(2000)
end
defp valid_exit_data({:ok, response}), do: valid_exit_data(response)
defp valid_exit_data(%{proof: _}), do: true
defp valid_exit_data(_), do: false
defp compute_std_exits_statistics(task) do
{time, exits} = Task.await(task, :infinity)
valid? = Enum.map(exits, &valid_exit_data/1)
%{
span_ms: div(time, @micros_in_millisecond),
corrects_count: Enum.count(valid?, & &1),
errors_count: Enum.count(valid?, &(!&1))
}
end
# This function is prepared to be called in `WaitFor.repeat_until_ok`.
# It repeatedly asks for the Watcher's `/status.get` until the Watcher has consumed the mined block
defp watcher_synchronized?(watcher_url) do
with {:ok, status} <- WatcherClient.get_status(watcher_url) do
watcher_synchronized_to_mined_block?(status)
else
_ -> :repeat
end
end
defp watcher_synchronized_to_mined_block?(%{
last_mined_child_block_number: last_mined_child_block_number,
last_validated_child_block_number: last_validated_child_block_number
})
when last_mined_child_block_number == last_validated_child_block_number and
last_mined_child_block_number > 0 do
_ = Logger.debug("Synced to blknum: #{last_validated_child_block_number}")
{:ok, last_validated_child_block_number}
end
defp watcher_synchronized_to_mined_block?(_), do: :repeat
end
|
apps/omg_performance/lib/omg_performance/byzantine_events.ex
| 0.818556
| 0.801897
|
byzantine_events.ex
|
starcoder
|
defmodule FP.Intro do
@moduledoc """
Elixir solutions for HackerRank functional programming challenges.
"""
@doc """
Reverse a list - without using Enum.reverse
https://www.hackerrank.com/challenges/fp-reverse-a-list/problem
"""
@spec reverse(list, list) :: list(integer)
def reverse([], results), do: results
def reverse(numbers, results) when length(numbers) >= 1 and length(numbers) <= 100 do
reverse(numbers |> tl, [numbers |> hd | results])
end
def reverse(_numbers, _results), do: []
@doc """
Evaluating e^x
https://www.hackerrank.com/challenges/eval-ex/problem
"""
@spec exp(float, integer, number) :: float
def exp(x, no_of_terms, results \\ 1)
def exp(_x, 0, results), do: results |> Float.round(4)
def exp(x, no_of_terms, results) do
exp(x, no_of_terms - 1, results + nth_term(x, no_of_terms))
end
defp nth_term(x, n), do: :math.pow(x, n) / factorial(n)
defp factorial(0), do: 1
defp factorial(n) when n > 0, do: Enum.reduce(1..n, 1, &*/2)
@doc """
Area of a curve by definite integrals
https://www.hackerrank.com/challenges/area-under-curves-and-volume-of-revolving-a-curv/problem
"""
@spec area(list(integer), list(integer), integer, integer, float) :: float
def area(c, p, l, r, dx) do
y = fn x -> f(c, p, x) end
# total number of sub-intervals
n = ((r - l) / dx) |> trunc
# according to the formula provided via HackerRank
# limit definition by definite integrals
0..n
|> Enum.map(&(y.(l + &1 * dx) * dx))
|> Enum.sum()
|> Float.round(1)
end
@doc """
Volume of a curve by definite integrals
https://www.hackerrank.com/challenges/area-under-curves-and-volume-of-revolving-a-curv/problem
"""
# according to the volume formula in
# https://www.wyzant.com/resources/lessons/math/calculus/integration/finding_volume
@spec volume(list(integer), list(integer), integer, integer, float) :: float
def volume(c, p, l, r, dx) do
y = fn x -> :math.pow(f(c, p, x), 2) end
# total number of sub-intervals
n = ((r - l) / dx) |> trunc
0..n
|> Enum.map(&(y.(l + &1 * dx) * dx))
|> Enum.sum()
|> :erlang.*(:math.pi())
|> Float.round(1)
end
# construct algebraic series expression
defp f(c, p, x) when is_list(c) and is_list(p) do
Enum.zip(c, p)
|> Enum.reduce(0, fn cp, acc -> acc + f(cp, x) end)
end
defp f({c, p}, x), do: c * :math.pow(x, p)
@doc """
Function or not - validating values x, y or x and f(x).
Given a list of x, y values, this function determines
(true, false) whether the values could be input/output
for a valid function.
https://www.hackerrank.com/challenges/functions-or-not/problem
"""
@spec function?(list(tuple)) :: boolean
def function?(xy) do
xy
# maps to x, y
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
# find any x with multiple y values, i.e. invalid relation
|> Enum.find(fn {_k, v} -> length(v) > 1 end)
# should be nil if all x, y pairs have 1-1 unique mapping
|> is_nil
end
@doc """
Compute the Perimeter of a Polygon.
https://www.hackerrank.com/challenges/lambda-march-compute-the-perimeter-of-a-polygon/problem
"""
@spec perimeter(list(tuple)) :: float
def perimeter(coordinates), do: _perimeter(coordinates ++ [coordinates |> hd])
# recursively compute perimeter, when "coordinates"
# contain only 1 pt (last pt), return results
defp _perimeter(coordinates, distance \\ 0.0)
defp _perimeter(coordinates, distance) when length(coordinates) == 1, do: distance |> Float.round(7)
defp _perimeter([p1 | coordinates], distance) do
p2 = coordinates |> hd
_perimeter(coordinates, distance + _perimeter(p1, p2))
end
defp _perimeter({x1, y1}, {x2, y2}), do: :math.sqrt(:math.pow(y2 - y1, 2) + :math.pow(x2 - x1, 2))
@doc """
Compute the area of a Polygon.
https://www.hackerrank.com/challenges/lambda-march-compute-the-area-of-a-polygon/problem
"""
@spec area_polygon(list(tuple)) :: float
def area_polygon(coordinates), do: _area_polygon(coordinates ++ [coordinates |> hd])
# using irregular polygon formula in https://www.mathopenref.com/coordpolygonarea.html
defp _area_polygon(coordinates, x \\ 0.0)
defp _area_polygon(coordinates, x) when length(coordinates) == 1, do: abs(x / 2) |> Float.round(7)
defp _area_polygon([p1 | coordinates], x) do
p2 = coordinates |> hd
_area_polygon(coordinates, x + _area_polygon(p1, p2))
end
defp _area_polygon({x1, y1}, {x2, y2}), do: x1 * y2 - y1 * x2
end
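# Two quick checks of the solutions above: reverse/2 threads an accumulator,
# and exp/2 sums the first n+1 series terms (here e rounded to 2.7183):
#
#     FP.Intro.reverse([1, 2, 3], [])  # => [3, 2, 1]
#     FP.Intro.exp(1.0, 9)             # => 2.7183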
|
lib/fp/intro.ex
| 0.861057
| 0.706482
|
intro.ex
|
starcoder
|
defmodule OAAS.Job.Replay do
@moduledoc "A replay recording/uploading job."
alias OAAS.Osu
alias OAAS.Job
import OAAS.Utils
use Bitwise, only_operators: true
@derive Jason.Encoder
@enforce_keys [:player, :beatmap, :replay, :upload, :skin]
defstruct @enforce_keys ++ [:reddit_id]
@type t :: %__MODULE__{
player: %{user_id: integer, username: String.t()},
beatmap: %{
beatmap_id: integer,
artist: String.t(),
title: String.t(),
version: String.t()
},
replay: %{osr: String.t(), length: integer},
upload: %{title: String.t(), description: String.t(), tags: [String.t()]},
skin: %{name: String.t(), url: String.t()},
reddit_id: String.t() | nil
}
@doc "Describes a job."
@spec describe(Job.t()) :: String.t()
def describe(j) do
player = "#{j.data.player.username} (https://osu.ppy.sh/u/#{j.data.player.user_id})"
reddit = if is_nil(j.data.reddit_id), do: "None", else: "https://redd.it/#{j.data.reddit_id}"
beatmap =
"#{j.data.beatmap.artist} - #{j.data.beatmap.title} [#{j.data.beatmap.version}] (https://osu.ppy.sh/b/#{
j.data.beatmap.beatmap_id
})"
"""
```yml
#{Job.describe(j)}
Player: #{player}
Beatmap: #{beatmap}
Video: #{j.data.upload.title}
Skin: #{(j.data.skin || %{})[:name] || "None"}
Reddit: #{reddit}
Replay:
Date: #{j.data.replay.timestamp}
Mods: #{Osu.mods_to_string(j.data.replay.mods)}
Combo: #{j.data.replay.combo}
Score: #{j.data.replay.score}
Accuracy: #{j.data.replay |> Osu.accuracy() |> :erlang.float_to_binary(decimals: 2)}%
Length: #{display_time(j.data.replay.length)}
```
"""
end
@doc "Creates a replay job from a Reddit post."
@spec from_reddit(String.t(), String.t()) :: {:ok, Job.t()} | {:error, term}
def from_reddit(id, title) do
with {:ok, username} <- extract_username(title),
{:ok, map_name} <- extract_map_name(title),
{:ok, mods} <- extract_mods(title),
{:ok, %{} = player} <- OsuEx.API.get_user(username, event_days: 31),
{:ok, %{} = beatmap} <- search_beatmap(player, map_name),
{:ok, osr} <- OsuEx.Osr.download_replay(beatmap.beatmap_id, player.user_id, mods: mods),
{:ok, replay} <- OsuEx.Osr.parse(osr) do
skin = Osu.skin(player.username)
Job.put(
type: Job.type(__MODULE__),
status: Job.status(:pending),
data: %__MODULE__{
player: Map.delete(player, :events),
beatmap: beatmap,
replay:
Map.merge(replay, %{
replay_data: nil,
osr: Base.encode64(osr),
length: Osu.replay_length(beatmap, replay.mods)
}),
upload: upload_data(player, beatmap, replay, skin),
skin: skin,
reddit_id: id
}
)
else
{:ok, nil} -> {:error, :player_not_found}
{:error, reason} -> {:error, reason}
end
end
@doc "Creates a replay job from a replay link."
@spec from_osr(String.t(), String.t() | nil) :: {:ok, Job.t()} | {:error, term}
def from_osr(url, skin_override \\ nil) do
with {:ok, %{body: osr}} <- HTTPoison.get(url),
{:ok, replay} <- OsuEx.Osr.parse(osr),
{:ok, player} <- OsuEx.API.get_user(replay.player),
{:ok, beatmap} <- OsuEx.API.get_beatmap(replay.beatmap_md5) do
skin = Osu.skin(skin_override || player.username)
Job.put(
type: Job.type(__MODULE__),
status: Job.status(:pending),
data: %__MODULE__{
player: Map.delete(player, :events),
beatmap: beatmap,
replay:
Map.merge(replay, %{
replay_data: nil,
osr: Base.encode64(osr),
length: Osu.replay_length(beatmap, replay.mods)
}),
upload: upload_data(player, beatmap, replay, skin),
skin: skin
}
)
else
{:error, reason} -> {:error, reason}
end
end
# Get pp for a play as a string.
@spec pp_string(map, map, map) :: String.t() | nil
defp pp_string(player, beatmap, replay) do
case Osu.pp(player, beatmap, replay) do
{:ok, pp} ->
:erlang.float_to_binary(pp, decimals: 0) <> "pp"
{:error, reason} ->
notify(:warn, "Looking up score for pp value failed.", reason)
nil
end
end
# Generate the upload description.
@spec description(map, map, map) :: String.t()
defp description(%{username: name, user_id: user_id}, %{beatmap_id: beatmap_id}, %{name: skin}) do
skin_name =
if String.starts_with?(skin, "CirclePeople") do
"Default Skin"
else
"#{name}'s Skin"
end
"""
#{name}'s Profile: https://osu.ppy.sh/u/#{user_id} | #{skin_name}: https://circle-people.com/skins
Map: https://osu.ppy.sh/b/#{beatmap_id} | Click "Show more" for an explanation what this video and free to play rhythm game is all about!
------------------------------------------------------
osu! is the perfect game if you're looking for ftp games as
osu! is a free to play online rhythm game, which you can use as a rhythm trainer online with lots of gameplay music!
https://osu.ppy.sh
osu! has online rankings, multiplayer and boasts a community with over 500,000 active users!
Title explanation:
PlayerName | Artist - Song [Difficulty] +ModificationOfMap PlayerAccuracyOnMap% PointsAwardedForThisPlayPP
Want to support what we do? Check out our Patreon!
https://patreon.com/CirclePeople
Join the CirclePeople community!
https://discord.gg/CirclePeople
Don't want to miss any pro plays? Subscribe or follow us!
Twitter: https://twitter.com/CirclePeopleYT
Facebook: https://facebook.com/CirclePeople
Want some sweet custom covers for your tablet?
https://yangu.pw/shop - Order here!
#CirclePeople
#osu
##{name}
"""
end
# Generate the upload tags.
@spec upload_tags(map, map) :: [String.t()]
defp upload_tags(_player, _beatmap) do
# TODO
[]
end
@title_limit 100
# Get upload data for a play.
@spec upload_data(map, map, map, map) :: map
defp upload_data(player, beatmap, replay, skin) do
mods = Osu.mods_to_string(replay.mods)
mods = if mods === "", do: nil, else: mods
fc = if replay.perfect?, do: "FC", else: nil
percent = :erlang.float_to_binary(Osu.accuracy(replay), decimals: 2) <> "%"
pp = pp_string(player, beatmap, replay)
extra =
[mods, percent, fc, pp]
|> Enum.reject(&is_nil/1)
|> Enum.join(" ")
map_name = "#{beatmap.artist} - #{beatmap.title} [#{beatmap.version}]"
title = String.trim("#{Osu.mode(replay.mode)} | #{player.username} | #{map_name} #{extra}")
notify(:debug, "Computed video title: #{title}.")
yt_title = if String.length(title) > @title_limit, do: "Placeholder Title", else: title
desc = title <> "\n" <> description(player, beatmap, skin)
tags = upload_tags(player, beatmap)
%{title: yt_title, description: desc, tags: tags}
end
# Get the player name from a post title.
@spec extract_username(String.t()) :: {:ok, String.t()} | {:error, :no_player_match}
defp extract_username(title) do
case Regex.run(~r/(.+?)\|/, title) do
[_, cap] ->
username =
cap
|> (&Regex.replace(~r/\(.*?\)/, &1, "")).()
|> String.trim()
notify(:debug, "Extracted username '#{username}'.")
{:ok, username}
nil ->
{:error, :no_player_match}
end
end
# Get the beatmap name from a post title.
@spec extract_map_name(String.t()) :: {:ok, String.t()} | {:error, :no_map_match}
defp extract_map_name(title) do
case Regex.run(~r/\|(.+?)-(.+?)\[(.+)\]/, title) do
[_, artist, title, diff] ->
s = "#{String.trim(artist)} - #{String.trim(title)} [#{String.trim(diff)}]"
notify(:debug, "Extracted map name: '#{s}'.")
{:ok, s}
nil ->
{:error, :no_map_match}
end
end
# Get the mods (as a number) from a post title.
@spec extract_mods(String.t()) :: {:ok, integer | nil}
defp extract_mods(title) do
{:ok,
case Regex.run(~r/\+ ?([A-Z,]+)/, title) do
[_, mods] ->
notify(:debug, "Extracted mods: '+#{mods}'.")
Osu.mods_from_string(mods)
nil ->
nil
end}
end
# Look for a beatmap by name in a player's activity.
@spec search_beatmap(map, binary) :: {:ok, map} | {:error, :beatmap_not_found}
defp search_beatmap(player, map_name) do
notify(:debug, "Searching for: '#{map_name}'.")
map_name = String.downcase(map_name)
try do
case search_events(player, map_name) do
{:ok, beatmap} -> throw(beatmap)
_ -> :noop
end
case search_recent(player, map_name) do
{:ok, beatmap} -> throw(beatmap)
_ -> :noop
end
case search_best(player, map_name) do
{:ok, beatmap} -> throw(beatmap)
_ -> :noop
end
{:error, :beatmap_not_found}
catch
beatmap ->
notify(:debug, "Found beatmap #{beatmap.beatmap_id}.")
{:ok, beatmap}
end
end
# Search a player's recent events for a beatmap.
@spec search_events(map, String.t()) :: {:ok, map} | {:error, term}
defp search_events(%{events: events}, map_name) do
case Enum.find(events, fn %{display_html: html} ->
html
|> String.downcase()
|> String.contains?(map_name)
end) do
%{beatmap_id: id} -> OsuEx.API.get_beatmap(id)
_ -> {:error, nil}
end
end
# Search a player's recent plays for a beatmap.
@spec search_recent(%{user_id: non_neg_integer}, String.t()) :: {:ok, map} | {:error, term}
defp search_recent(%{user_id: id}, map_name) do
case OsuEx.API.get_user_recent(id, limit: 50) do
{:ok, scores} -> search_scores(scores, map_name)
{:error, reason} -> {:error, reason}
end
end
# Search a player's best plays for a beatmap.
@spec search_best(map, String.t()) :: {:ok, map} | {:error, term}
defp search_best(%{user_id: id}, map_name) do
case OsuEx.API.get_user_best(id, limit: 100) do
{:ok, scores} -> search_scores(scores, map_name)
{:error, reason} -> {:error, reason}
end
end
# Search a list of scores for a beatmap.
@spec search_scores([map], String.t()) :: {:ok, map} | {:error, :not_found}
defp search_scores(scores, map_name) do
scores
|> Enum.uniq_by(&Map.get(&1, :beatmap_id))
|> Enum.find_value({:error, :not_found}, fn %{beatmap_id: map_id} ->
case OsuEx.API.get_beatmap(map_id) do
{:ok, %{artist: artist, title: title, version: version} = beatmap} ->
if strcmp("#{artist} - #{title} [#{version}]", map_name) do
{:ok, beatmap}
else
false
end
_ ->
false
end
end)
end
@osusearch_url "https://osusearch.com/api/search"
@spec osusearch_key :: String.t()
defp osusearch_key, do: Application.fetch_env!(:oaas, :osusearch_key)
@spec search_osusearch(String.t()) :: {:ok, map} | {:error, :not_found}
def search_osusearch(map_name) do
[_, artist, title, diff] = Regex.run(~r/(.+?) - (.+?) \[(.+?)\]/, map_name)
params = URI.encode_query(key: osusearch_key(), artist: artist, title: title, diff_name: diff)
with {:ok, %{status_code: 200, body: body}} <- HTTPoison.get(@osusearch_url <> "?" <> params),
{:ok, %{"beatmaps" => [_h | _t] = beatmaps}} <- Jason.decode(body) do
beatmaps =
beatmaps
|> atom_map()
|> Enum.filter(fn %{artist: a, title: t, difficulty_name: d} ->
strcmp("#{a} - #{t} [#{d}]", map_name)
end)
if Enum.empty?(beatmaps) do
{:error, :not_found}
else
beatmaps
|> Enum.max_by(&Map.get(&1, :favorites), fn -> nil end)
|> Map.get(:beatmap_id)
|> OsuEx.API.get_beatmap()
end
else
_ -> {:error, :not_found}
end
end
end
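# A title-parsing sketch for the private extract_* helpers above, run against a
# typical post title (the exact mods integer depends on Osu.mods_from_string/1):
#
#     "Cookiezi | xi - Blue Zenith [FOUR DIMENSIONS] +HDHR 99.09%"
#     # extract_username/1 -> {:ok, "Cookiezi"}
#     # extract_map_name/1 -> {:ok, "xi - Blue Zenith [FOUR DIMENSIONS]"}
#     # extract_mods/1     -> {:ok, <the HDHR mods bitmask>}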
|
server/lib/oaas/job/replay.ex
| 0.756268
| 0.599397
|
replay.ex
|
starcoder
|
defmodule Stripper.Whitespace do
@moduledoc """
This module exists for dealing with whitespace. A space is a space is a space,
right? Wrong. There are multiple [unicode](https://home.unicode.org/)
characters that represent whitespace: tabs, newlines, line-feeds, and a slew
of [lesser-known characters](http://jkorpela.fi/chars/spaces.html) that are
technically different entities but all of which could be referred to as
"space".
Sometimes, too many distinctions are a bad thing. A human might be able to read
text peppered with a dozen different variations in space characters, but some
processes may not. This module offers functions that strip away all the
nonsense and leave bare the simple spaces as nature intended.
"""
use Stripper.Parser.WhitespaceParser
use Stripper.Parser.FallbackParser
@doc """
The `normalize/1` function works the same way as the `normalize!/1` function
but it returns its output as an `:ok` tuple.
This is a convenience function provided to have idiomatic function specs.
## Usage Examples
iex> normalize("a \\t\\tbunch\\n of \\f nonsense\\n")
{:ok, "a bunch of nonsense"}
"""
@spec normalize(string :: String.t()) :: {:ok, String.t()}
def normalize(string) when is_binary(string) do
{:ok, parse(string, "", %{in_space: false})}
end
@doc """
Strip out any redundant spaces or other whitespace characters and normalize
them to simple spaces (i.e. `" "`). Multiple spaces all get collapsed down to
one space. Newlines, carriage returns, tabs, line-feeds et al all get replaced
with a regular space character.
Functionally, this is equivalent to something like the following:
iex> value = "your value here"
iex> String.trim(Regex.replace(~r/\\s+/u, value, " "))
## Examples
iex> normalize!("a \\t\\tbunch\\n of \\f nonsense\\n")
"a bunch of nonsense"
iex> normalize!(" trim me please ")
"trim me please"
iex> normalize!("foo\\n\\n\\nbar")
"foo bar"
iex> normalize!("\\u2009unicode\\u2008space\\u2003")
"unicode space"
"""
@spec normalize!(string :: String.t()) :: String.t()
def normalize!(string) when is_binary(string) do
parse(string, "", %{in_space: false})
end
end
|
lib/stripper/whitespace.ex
| 0.859103
| 0.60133
|
whitespace.ex
|
starcoder
|
defmodule Kino.DataTable do
@moduledoc """
A widget for interactively viewing enumerable data.
The data must be an enumerable of records, where each
record is either a map, struct, keyword list, or tuple.
## Examples
data = [
%{id: 1, name: "Elixir", website: "https://elixir-lang.org"},
%{id: 2, name: "Erlang", website: "https://www.erlang.org"}
]
Kino.DataTable.new(data)
The tabular view allows you to quickly preview the data
and analyze it thanks to sorting capabilities.
data = Process.list() |> Enum.map(&Process.info/1)
Kino.DataTable.new(
data,
keys: [:registered_name, :initial_call, :reductions, :stack_size]
)
"""
@behaviour Kino.Table
alias Kino.Utils
@type t :: Kino.Table.t()
@doc """
Starts a widget process with enumerable tabular data.
## Options
* `:keys` - a list of keys to include in the table for each record.
The order is reflected in the rendered table. For tuples use 0-based
indices. Optional.
* `:sorting_enabled` - whether the widget should support sorting the data.
Sorting requires traversal of the whole enumerable, so it may not be
desirable for lazy enumerables. Defaults to `true` if data is a list
and `false` otherwise.
* `:show_underscored` - whether to include record keys starting with underscore.
This option is ignored if `:keys` is also given. Defaults to `false`.
"""
@spec new(Enum.t(), keyword()) :: t()
def new(data, opts \\ []) do
validate_data!(data)
keys = opts[:keys]
sorting_enabled = Keyword.get(opts, :sorting_enabled, is_list(data))
show_underscored = Keyword.get(opts, :show_underscored, false)
opts = %{
data: data,
keys: keys,
sorting_enabled: sorting_enabled,
show_underscored: show_underscored
}
Kino.Table.new(__MODULE__, opts)
end
# Validate data only if we have a whole list upfront
defp validate_data!(data) when is_list(data) do
Enum.reduce(data, nil, fn record, type ->
case record_type(record) do
:other ->
raise ArgumentError,
"expected record to be either map, struct, tuple or keyword list, got: #{inspect(record)}"
first_type when type == nil ->
first_type
^type ->
type
other_type ->
raise ArgumentError,
"expected records to have the same data type, found #{type} and #{other_type}"
end
end)
end
defp validate_data!(_data), do: :ok
defp record_type(record) do
cond do
is_struct(record) -> :struct
is_map(record) -> :map
is_tuple(record) -> :tuple
Keyword.keyword?(record) -> :keyword_list
true -> :other
end
end
@impl true
def init(opts) do
%{
data: data,
keys: keys,
sorting_enabled: sorting_enabled,
show_underscored: show_underscored
} = opts
features = Kino.Utils.truthy_keys(pagination: true, sorting: sorting_enabled)
info = %{name: "Data", features: features}
total_rows = Enum.count(data)
{:ok, info,
%{
data: data,
total_rows: total_rows,
keys: keys,
show_underscored: show_underscored
}}
end
@impl true
def get_data(rows_spec, state) do
records = get_records(state.data, rows_spec)
keys =
if keys = state.keys do
keys
else
keys = Utils.Table.keys_for_records(records)
if state.show_underscored do
keys
else
Enum.reject(keys, &underscored?/1)
end
end
columns = Utils.Table.keys_to_columns(keys)
rows = Enum.map(records, &Utils.Table.record_to_row(&1, keys))
{:ok, %{columns: columns, rows: rows, total_rows: state.total_rows}, state}
end
defp get_records(data, rows_spec) do
sorted_data =
if order_by = rows_spec[:order_by] do
Enum.sort_by(data, &Utils.Table.get_field(&1, order_by), rows_spec.order)
else
data
end
Enum.slice(sorted_data, rows_spec.offset, rows_spec.limit)
end
defp underscored?(key) when is_atom(key) do
key |> Atom.to_string() |> String.starts_with?("_")
end
defp underscored?(_key), do: false
end
|
lib/kino/data_table.ex
| 0.865906
| 0.661942
|
data_table.ex
|
starcoder
|
defmodule Constants do
@moduledoc """
An alternative to the `@constant_name value` approach to defining reusable
constants in Elixir.
This module offers an approach to define these in a
module that can be shared with other modules. They are implemented with
macros so they can be used in guards and matches
## Examples:
Create a module to define your shared constants
defmodule MyConstants do
use Constants
define something, 10
define another, 20
end
Use the constants
defmodule MyModule do
require MyConstants
alias MyConstants, as: Const
def myfunc(item) when item == Const.something, do: Const.something + 5
def myfunc(item) when item == Const.another, do: Const.another
end
"""
defmacro __using__(_opts) do
quote do
import Constants
end
end
@doc "Define a constant"
defmacro constant(name, value) do
quote do
defmacro unquote(name), do: unquote(value)
end
end
@doc "Define a constant. An alias for constant"
defmacro define(name, value) do
quote do
constant unquote(name), unquote(value)
end
end
@doc """
Import an hrl file.
Create constants for each -define(NAME, value).
"""
defmacro import_hrl(file_name) do
list = parse_file file_name
quote bind_quoted: [list: list] do
for {name, value} <- list do
defmacro unquote(name)(), do: unquote(value)
end
end
end
defp parse_file(file_name) do
for line <- File.stream!(file_name, [], :line) do
parse_line line
end
|> Enum.filter(&(not is_nil(&1)))
end
defp parse_line(line) do
case Regex.run ~r/-define\((.+),(.+)\)\./, line do
nil -> nil
[_, name, value] ->
{String.trim(name) |> String.downcase() |> String.to_atom(), String.trim(value) |> parse_value()}
_ -> nil
end
end
defp parse_value(string) do
case Integer.parse string do
:error -> filter_string(string)
{num, _} -> num
end
end
defp filter_string(string), do: String.replace(string, "\"", "")
end
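# An import_hrl sketch; the file name and -define lines are illustrative.
# Given "include/limits.hrl" containing `-define(MAX_USERS, 100).`, names are
# downcased into macro constants:
#
#     defmodule MyHrlConstants do
#       use Constants
#       import_hrl "include/limits.hrl"
#     end
#
#     require MyHrlConstants
#     MyHrlConstants.max_users()  # => 100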
|
lib/ucx_chat/constants.ex
| 0.744006
| 0.5144
|
constants.ex
|
starcoder
|
require Utils
require Program
defmodule D16 do
@moduledoc """
--- Day 16: Flawed Frequency Transmission ---
You're 3/4ths of the way through the gas giants. Not only do roundtrip signals to Earth take five hours, but the signal quality is quite bad as well. You can clean up the signal with the Flawed Frequency Transmission algorithm, or FFT.
As input, FFT takes a list of numbers. In the signal you received (your puzzle input), each number is a single digit: data like 15243 represents the sequence 1, 5, 2, 4, 3.
FFT operates in repeated phases. In each phase, a new list is constructed with the same length as the input list. This new list is also used as the input for the next phase.
Each element in the new list is built by multiplying every value in the input list by a value in a repeating pattern and then adding up the results. So, if the input list were 9, 8, 7, 6, 5 and the pattern for a given element were 1, 2, 3, the result would be 9*1 + 8*2 + 7*3 + 6*1 + 5*2 (with each input element on the left and each value in the repeating pattern on the right of each multiplication). Then, only the ones digit is kept: 38 becomes 8, -17 becomes 7, and so on.
While each element in the output array uses all of the same input array elements, the actual repeating pattern to use depends on which output element is being calculated. The base pattern is 0, 1, 0, -1. Then, repeat each value in the pattern a number of times equal to the position in the output list being considered. Repeat once for the first element, twice for the second element, three times for the third element, and so on. So, if the third element of the output list is being calculated, repeating the values would produce: 0, 0, 0, 1, 1, 1, 0, 0, 0, -1, -1, -1.
When applying the pattern, skip the very first value exactly once. (In other words, offset the whole pattern left by one.) So, for the second element of the output list, the actual pattern used would be: 0, 1, 1, 0, 0, -1, -1, 0, 0, 1, 1, 0, 0, -1, -1, ....
After using this process to calculate each element of the output list, the phase is complete, and the output list of this phase is used as the new input list for the next phase, if any.
After 100 phases of FFT, what are the first eight digits in the final output list?
--- Part Two ---
Now that your FFT is working, you can decode the real signal.
The real signal is your puzzle input repeated 10000 times. Treat this new signal as a single input list. Patterns are still calculated as before, and 100 phases of FFT are still applied.
The first seven digits of your initial input signal also represent the message offset. The message offset is the location of the eight-digit message in the final output list. Specifically, the message offset indicates the number of digits to skip before reading the eight-digit message. For example, if the first seven digits of your initial input signal were 1234567, the eight-digit message would be the eight digits after skipping 1,234,567 digits of the final output list. Or, if the message offset were 7 and your final output list were 98765432109876543210, the eight-digit message would be 21098765. (Of course, your real message offset will be a seven-digit number, not a one-digit number like 7.)
In the full puzzle statement, example inputs follow here with the eight-digit message in the final output list after 100 phases; the message offset given in each input is highlighted. (Note that those inputs are repeated 10000 times to find the actual starting input lists.)
After repeating your input signal 10000 times and running 100 phases of FFT, what is the eight-digit message embedded in the final output list?
"""
@behaviour Day
def fill(arr, _x, 0), do: arr
def fill(arr, x, n), do: fill([x | arr], x, n - 1)
def fill(x, n), do: fill([], x, n)
def part_1(digits) do
length = length(digits)
length2 = div(length, 2)
master = [0, 1, 0, -1]
masks =
Enum.map(1..length2, fn x ->
master
|> Stream.map(&fill(&1, x))
|> Stream.concat()
|> Stream.cycle()
|> Stream.drop(1)
|> Enum.take(length)
end)
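# The front half of the output needs the full mask multiplication; for the
# back half the mask is all 1s from position i onward, so each digit is a
# suffix sum mod 10, computed below via reverse/scan/reverse.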
Enum.reduce(1..100, digits, fn _i, state ->
Enum.map(masks, fn mask ->
Enum.zip(mask, state)
|> Enum.map(fn {m, d} -> m * d end)
|> Enum.sum()
|> abs
|> rem(10)
end) ++
(Enum.drop(state, length2)
|> Enum.reverse()
|> Enum.scan(&rem(&1 + &2, 10))
|> Enum.reverse())
end)
|> Enum.take(8)
|> Integer.undigits()
end
def part_2(digits) do
offset = digits |> Enum.take(7) |> Integer.undigits()
part_2_digits =
[digits]
|> Stream.cycle()
|> Enum.take(10000)
|> List.flatten()
|> Enum.drop(offset)
|> Enum.reverse()
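# With the offset in the back half of the repeated signal (as in AoC inputs),
# each FFT phase reduces to a suffix sum mod 10; keeping the tail reversed
# lets Enum.scan compute it directly.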
Enum.reduce(1..100, part_2_digits, fn _i, state ->
Enum.scan(state, &rem(&1 + &2, 10))
end)
|> Enum.reverse()
|> Enum.take(8)
|> Integer.undigits()
end
def solve(input) do
input = Utils.to_int(input)
digits = Integer.digits(input)
part_1 = part_1(digits)
part_2 = part_2(digits)
{
part_1,
part_2
}
end
end
|
lib/days/16.ex
| 0.855202
| 0.918114
|
16.ex
|
starcoder
|
defmodule Redisank do
defmodule Base do
use Rdtype,
uri: Application.get_env(:redisank, :redis)[:ranking],
coder: Redisank.Coder,
type: :sorted_set
end
@format "{YYYY}{0M}{0D}"
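# Builds a period-scoped key, e.g. namekey({{2020, 1, 15}, {0, 0, 0}}, :weekly) #=> "weekly/20200115"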
def namekey(date, key) do
"#{key}/#{Timex.format! date, @format}"
end
def incr(id, time \\ :calendar.local_time)
def incr(id, time) when is_integer(id) do
time
|> Timex.format!(@format)
|> Base.zincrby(1, id)
end
def incr(_, _), do: :error
def sum(:all), do: sum(nil, :all)
def sum(time, :all) do
time = time || :calendar.local_time
sum time, :weekly
sum time, :monthly
sum time, :quarterly
sum time, :biannually
sum time, :yearly
end
def del(from, to, :daily) do
date(from, to, :daily)
|> Base.del
end
def del(from, to, :weekly) do
date(from, to, :weekly)
|> Base.del
end
def del(from, to, :monthly) do
date(from, to, :monthly)
|> Base.del
end
def date(from, to, :daily) do
0..abs(Timex.diff(from, to, :days))
|> Enum.map(&Timex.shift from, days: &1)
|> Enum.map(&Timex.format! &1, @format)
end
def date(from, to, :weekly) do
0..abs(Timex.diff(from, to, :weeks))
|> Enum.map(&Timex.shift Timex.beginning_of_week(from), weeks: &1)
|> Enum.map(&namekey(&1, :weekly))
end
def date(from, to, :monthly) do
0..abs(Timex.diff(from, to, :months))
|> Enum.map(&Timex.shift Timex.beginning_of_month(from), months: &1)
|> Enum.map(&namekey(&1, :monthly))
end
def sum(:weekly), do: sum(nil, :weekly)
def sum(time, :weekly) do
time = time || :calendar.local_time
from = time |> Timex.beginning_of_week
to = time |> Timex.end_of_week
dates = date from, to, :daily
Base.zunionstore namekey(from, :weekly), dates, aggregate: "sum"
end
def sum(:monthly), do: sum(nil, :monthly)
def sum(time, :monthly) do
time = time || :calendar.local_time
from = time |> Timex.beginning_of_month
to = time |> Timex.end_of_month
dates = date from, to, :weekly
Base.zunionstore namekey(from, :monthly), dates, aggregate: "sum"
end
def sum(:quarterly), do: sum(nil, :quarterly)
def sum(time, :quarterly) do
time = time || :calendar.local_time
from = time |> Timex.beginning_of_quarter
to = time |> Timex.end_of_quarter
dates = date from, to, :monthly
Base.zunionstore namekey(from, :quarterly), dates, aggregate: "sum"
end
def sum(:biannually), do: sum(nil, :biannually)
def sum(time, :biannually) do
time = time || :calendar.local_time
from = time |> beginning_of_biannual
to = time |> end_of_biannual
dates =
0..abs(Timex.diff(from, to, :months))
|> Enum.map(&Timex.shift Timex.beginning_of_month(from), months: &1)
|> Enum.map(&namekey(&1, :quarterly))
Base.zunionstore namekey(from, :biannually), dates, aggregate: "sum"
end
def sum(:yearly), do: sum(nil, :yearly)
def sum(time, :yearly) do
time = time || :calendar.local_time
from = time |> Timex.beginning_of_year
to = time |> Timex.end_of_year
dates =
0..abs(Timex.diff(from, to, :months))
|> Enum.map(&Timex.shift Timex.beginning_of_month(from), months: &1)
|> Enum.map(&namekey(&1, :biannually))
Base.zunionstore namekey(from, :yearly), dates, aggregate: "sum"
end
def top(:all, from, to), do: top :all, from, to, []
def top(:all, from, to, opts) do
%{
weekly: top(:weekly, from, to, opts),
monthly: top(:monthly, from, to, opts),
quarterly: top(:quarterly, from, to, opts),
biannually: top(:biannually, from, to, opts),
yearly: top(:yearly, from, to, opts),
}
end
def top(key, from, to, opts) do
time = :calendar.local_time
key =
case :"#{key}" do
:weekly -> namekey(Timex.beginning_of_week(time), :weekly)
:monthly -> namekey(Timex.beginning_of_month(time), :monthly)
:quarterly -> namekey(Timex.beginning_of_quarter(time), :quarterly)
:biannually -> namekey(beginning_of_biannual(time), :biannually)
:yearly -> namekey(Timex.beginning_of_year(time), :yearly)
_ -> key
end
score = Keyword.get opts, :withscores, false
Base.zrevrangebyscore key, "+inf", "-inf", withscores: score, limit: [from, to]
end
def top(:all), do: top :all, 0, 50, []
def top(key), do: top key, 0, 50, []
def beginning_of_biannual({{year, month, _}, _}) do
case month do
m when m in 1..6 ->
Timex.beginning_of_month Timex.to_datetime({{year, 1, 1}, {0, 0, 0}})
m when m in 7..12 ->
Timex.beginning_of_month Timex.to_datetime({{year, 7, 1}, {0, 0, 0}})
end
end
def end_of_biannual({{year, month, _}, _}) do
case month do
m when m in 1..6 ->
Timex.end_of_month Timex.to_datetime({{year, 6, 1}, {0, 0, 0}})
m when m in 7..12 ->
Timex.end_of_month Timex.to_datetime({{year, 12, 1}, {0, 0, 0}})
end
end
end
|
lib/redisank.ex
| 0.540681
| 0.419678
|
redisank.ex
|
starcoder
|
defmodule PrivCheck.DocChecker do
@moduledoc """
Finds the docs of a module, caching results for performance.
Depends heavily on the documentation layout described and implemented in EEP 48:
http://erlang.org/eeps/eep-0048.html
"""
@type visibility :: :public | :private | :not_found
@doc """
Check if the given module is considered public
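## Example (assumes the docs chunk is available for the module)
    iex> PrivCheck.DocChecker.public?(Enum)
    true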
"""
def public?(mod) when is_atom(mod) do
case mod_visibility(mod) do
:public -> true
:private -> false
:not_found -> false
end
end
def public_fun?({mod, fun, arity}) do
case mfa_visibility({mod, fun, arity}) do
:public -> true
:private -> false
:not_found -> false
:unknown -> true
end
end
@doc """
Check the visibility type of the given module
"""
@spec mod_visibility(module()) :: visibility()
def mod_visibility(mod) do
case Code.fetch_docs(mod) do
# No @moduledoc annotation
{:docs_v1, _line, :elixir, _format, :none, _metadata, _docs} ->
:public
# @moduledoc false
{:docs_v1, _line, :elixir, _format, :hidden, _metadata, _docs} ->
:private
{:docs_v1, _line, :elixir, _format, moduledocs, _metadata, _docs} when is_map(moduledocs) ->
:public
{:error, _} ->
:not_found
end
end
@spec mfa_visibility(mfa()) :: visibility() | :unknown
def mfa_visibility({mod, fun, arity}) do
case Code.fetch_docs(mod) do
# Module has no @moduledoc annotation
{:docs_v1, _line, :elixir, _format, :none, _metadata, docs} ->
fun_visibility(fun, arity, docs)
# Module has @moduledoc false
{:docs_v1, _line, :elixir, _format, :hidden, _metadata, _docs} ->
:private
{:docs_v1, _line, :elixir, _format, moduledocs, _metadata, docs} when is_map(moduledocs) ->
case fun_visibility(fun, arity, docs) do
:not_found ->
case Keyword.get_values(mod.__info__(:functions), fun) do
[] ->
case Keyword.get_values(mod.__info__(:macros), fun) do
[] ->
:not_found
arities ->
if arity in arities, do: :public, else: :not_found
end
arities ->
if arity in arities, do: :public, else: :not_found
end
other ->
other
end
{:error, _error} ->
:unknown
end
end
@spec fun_visibility(atom(), non_neg_integer(), list()) :: visibility()
defp fun_visibility(fun, arity, docs) do
Enum.find_value(docs, :not_found, fn
# No @doc annotation
{{:function, ^fun, ^arity}, _line, _signature, :none, _metadata} ->
:public
# @doc false
{{:function, ^fun, ^arity}, _line, _signature, :hidden, _metadata} ->
:private
# has @doc entry
{{:function, ^fun, ^arity}, _line, _signature, func_docs, _metadata} when is_map(func_docs) ->
:public
# No @doc annotation
{{:macro, ^fun, ^arity}, _line, _signature, :none, _metadata} ->
:public
# @doc false
{{:macro, ^fun, ^arity}, _line, _signature, :hidden, _metadata} ->
:private
# has @doc entry
{{:macro, ^fun, ^arity}, _line, _signature, func_docs, _metadata} when is_map(func_docs) ->
:public
_ ->
false
end)
end
end
|
lib/priv_check/doc_checker.ex
| 0.766162
| 0.406302
|
doc_checker.ex
|
starcoder
|
defmodule ElixirRigidPhysics.Collision.Narrowphase do
@moduledoc """
Functions for generating collision manifolds for body pairs.
Supported:
* sphere-sphere
* sphere-capsule
* capsule-capsule
Planned:
* sphere-hull
* capsule-hull
* hull-hull
Check [here](http://media.steampowered.com/apps/valve/2015/DirkGregorius_Contacts.pdf) for a good summary of things.
"""
require ElixirRigidPhysics.Dynamics.Body, as: Body
require ElixirRigidPhysics.Geometry.Sphere, as: Sphere
require ElixirRigidPhysics.Geometry.Capsule, as: Capsule
require ElixirRigidPhysics.Geometry.Hull, as: Hull
require ElixirRigidPhysics.Collision.Contact, as: Contact
alias ElixirRigidPhysics.Collision.Intersection.SphereSphere
alias ElixirRigidPhysics.Collision.Intersection.SphereCapsule
alias ElixirRigidPhysics.Collision.Intersection.CapsuleCapsule
alias ElixirRigidPhysics.Collision.Intersection.SphereHull
alias ElixirRigidPhysics.Collision.Intersection.CapsuleHull
alias ElixirRigidPhysics.Collision.Intersection.HullHull
@doc """
Tests the intersection of two shapes.
"""
@spec test_intersection(Body.body(), Body.body()) :: Contact.contact_result()
def test_intersection(
Body.body(shape: Sphere.sphere()) = a,
Body.body(shape: Sphere.sphere()) = b
),
do: SphereSphere.check(a, b)
def test_intersection(
Body.body(shape: Sphere.sphere()) = a,
Body.body(shape: Capsule.capsule()) = b
),
do: SphereCapsule.check(a, b)
def test_intersection(
Body.body(shape: Capsule.capsule()) = b,
Body.body(shape: Sphere.sphere()) = a
),
do: SphereCapsule.check(a, b)
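# Note: mixed-shape clauses are duplicated with swapped argument patterns,
# always binding the "smaller" shape as `a`, so each Intersection module only
# needs to handle one canonical ordering.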
def test_intersection(
Body.body(shape: Sphere.sphere()) = a,
Body.body(shape: Hull.hull()) = b
),
do: SphereHull.check(a, b)
def test_intersection(
Body.body(shape: Hull.hull()) = b,
Body.body(shape: Sphere.sphere()) = a
),
do: SphereHull.check(a, b)
def test_intersection(
Body.body(shape: Capsule.capsule()) = a,
Body.body(shape: Capsule.capsule()) = b
),
do: CapsuleCapsule.check(a, b)
def test_intersection(
Body.body(shape: Capsule.capsule()) = a,
Body.body(shape: Hull.hull()) = b
),
do: CapsuleHull.check(a, b)
def test_intersection(
Body.body(shape: Hull.hull()) = b,
Body.body(shape: Capsule.capsule()) = a
),
do: CapsuleHull.check(a, b)
def test_intersection(
Body.body(shape: Hull.hull()) = a,
Body.body(shape: Hull.hull()) = b
),
do: HullHull.check(a, b)
def test_intersection(_, _), do: {:error, :bad_bodies}
end
|
lib/collision/narrowphase.ex
| 0.829527
| 0.413211
|
narrowphase.ex
|
starcoder
|
defmodule Ratio.FloatConversion do
use Ratio
@max_decimals Application.get_env(:ratio, :max_float_to_rational_digits)
@doc """
Converts a float to a rational number.
Because base-2 floats cannot represent all base-10 fractions properly, the results might be different from what you might expect.
See [The Perils of Floating Point](http://www.lahey.com/float.htm) for more information about this.
It is possible to restrict *max_decimals* to make the result more readable/understandable, at the possible loss of precision.
The default value for *max_decimals* is `#{@max_decimals}` (Erlang allows values between 0 and 249, see erlang's `float_to_list` function)
## Examples
iex> Ratio.FloatConversion.float_to_rational(10.0)
10
iex> Ratio.FloatConversion.float_to_rational(13.5)
27 <|> 2
iex> Ratio.FloatConversion.float_to_rational(1.1, 100)
2476979795053773 <|> 2251799813685248
iex> Ratio.FloatConversion.float_to_rational(1.1, 3)
11 <|> 10
"""
def float_to_rational(float, max_decimals \\ @max_decimals)
def float_to_rational(float, max_decimals) when Kernel.<(float, 0.0) do
-float_to_rational(abs(float), max_decimals)
end
def float_to_rational(float, max_decimals) do
float_to_intdec_tuple(float, max_decimals)
|> intdec_tuple_to_rational
end
# Changes 1.234 to {'1', '234'}
defp float_to_intdec_tuple(float, max_decimals) do
# While the `:decimals` option is allowed to be 0..249 according to the Erlang documentation,
# it will throw errors on large numbers if you do.
{integer_list, [?. | decimal_list]} =
:erlang.float_to_list(float, [{:decimals, max_decimals}, :compact])
|> Enum.split_while(fn x -> x != ?. end)
{integer_list, decimal_list}
end
# Changes {'1', '234'} to (1234 <|> 1000)
defp intdec_tuple_to_rational({integer_list, decimal_list}) do
decimal_len = Enum.count(decimal_list)
numerator = Ratio.pow(10, decimal_len)
integer = List.to_integer(integer_list)
decimal = List.to_integer(decimal_list)
(integer * numerator + decimal) <|> numerator
end
end
|
lib/ratio/float_conversion.ex
| 0.892038
| 0.49707
|
float_conversion.ex
|
starcoder
|
defmodule Lacca.Protocol do
@moduledoc """
This module provides helper functions for communicating with external
processes which implement the `shellac` protocol. The wire format of
the protocol follows:
- u16 packet length (i.e: read next `n` bytes)
- u8 packet flags
- [u8,...] packet payload (MsgPack encoded)
NOTE: if the high bit (0x80) of the packet flags is set, this message
is *incomplete* and the payload must be buffered by the receiver.
"""
defmodule Const do
def version, do: 1
def handshake_req, do: 0x00
def handshake_rep, do: 0x01
def start_process, do: 0x02
def stop_process, do: 0x03
end
defmodule BinUtils do
@moduledoc """
A collection of useful transformations when working w/ the
binary portions of this protocol.
"""
@doc "Splits a large binary into a list of smaller `chunk_size` binaries."
def chunk_binary(bin, chunk_size)
when is_integer(chunk_size) and is_binary(bin) and chunk_size > 0
do
_chunk_helper(bin, chunk_size, [])
|> Enum.reverse()
end
def chunk_binary(_bin, _chunk_size) do
raise ArgumentError, "chunk size must be greater than zero"
end
defp _chunk_helper(<<>>, _chunk_size, acc), do: acc
defp _chunk_helper(bin, chunk_size, acc) do
case bin do
<< head::binary-size(chunk_size), rest::binary >> ->
_chunk_helper(rest, chunk_size, [head | acc])
<< rest::binary >> ->
_chunk_helper(<<>>, chunk_size, [rest | acc])
end
end
end
defmodule Encoder do
@moduledoc """
This module provides helpers to encode messages suitable for being
sent to a running a daemon which implements the `shellac` protocol.
This module assumes that such a daemon is operating on a `Port` which
has been started w/ the following options: `[:binary, {:packet, 2}]`.
"""
import Bitwise
import Const
# 16-bits less `length` and `flags`
@max_payload_size (0xFFFF - 0x02 - 0x01)
defp _serialize_packets(packets, type) do
_serialize_packets(packets, type, [])
end
defp _serialize_packets([], _type, acc) do
Enum.reverse(acc)
end
defp _serialize_packets([head | []], type, acc) do
packet = _write_packet(_encode_flags(type, false), head)
_serialize_packets([], type, [packet | acc])
end
defp _serialize_packets([head | tail], type, acc) do
packet = _write_packet(_encode_flags(type, true), head)
_serialize_packets(tail, type, [packet | acc])
end
defp _encode_flags(type, is_continuation \\ false) when is_integer(type) do
case is_continuation do
true -> bor(0x80, band(0x0F, type))
false -> band(0x0F, type)
end
end
defp _write_packet(flags, payload) when is_binary(payload) do
<< flags >> <> payload
end
def write_handshake_req() do
<< _encode_flags(handshake_req()), version() >>
end
def write_data_packet(data) when is_binary(data) do
data_packet = %{"DataIn" => %{"buf" => :erlang.binary_to_list(data)}}
# encode the payload and split it into wire packets
packet_bin = Msgpax.pack!(data_packet, [iodata: false])
BinUtils.chunk_binary(packet_bin, @max_payload_size)
|> _serialize_packets(start_process())
end
def write_exit_packet() do
data_packet = %{"KillProcess" => nil}
# encode the payload and split it into wire packets
packet_bin = Msgpax.pack!(data_packet, [iodata: false])
BinUtils.chunk_binary(packet_bin, @max_payload_size)
|> _serialize_packets(start_process())
end
def write_start_process(exec_name, args \\ []) do
start_process_packet = %{
"StartProcess" => %{
"exec" => exec_name,
"args" => args
}
}
# encode the payload and split it into wire packets
packet_bin = Msgpax.pack!(start_process_packet, [iodata: false])
BinUtils.chunk_binary(packet_bin, @max_payload_size)
|> _serialize_packets(start_process())
end
end
end
|
lib/lacca/protocol.ex
| 0.697403
| 0.488832
|
protocol.ex
|
starcoder
|
defmodule Talib.RSI do
alias Talib.SMMA
alias Talib.Utility
@moduledoc ~S"""
Defines RSI.
"""
@doc """
Gets the RSI of a list.
Version: 1.0
Source: http://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:relative_strength_index_rsi
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@typedoc """
Defines a RSI index.
* :period - The period of the RSI
* :values - List of values resulting from the calculation
"""
@type t :: %Talib.RSI{
period: integer,
values: [number]
}
defstruct period: 0,
values: []
@doc """
Gets the RSI of a list.
Returns `{:error, :no_data}` if the given list is empty.
## Examples
iex>Talib.RSI.from_list([81, 24, 75], 2)
{:ok, %Talib.RSI{
period: 2,
values: [
0.0, 0.0, 64.15
]
}}
iex>Talib.RSI.from_list([], 2)
{:error, :no_data}
"""
@spec from_list([number], integer) ::
{:ok, Talib.RSI.t()}
| {:error, atom}
def from_list(data, period \\ 14),
do: calculate(data, period)
@doc """
Gets the RSI of a list.
Raises `NoDataError` if the given list is an empty list.
## Examples
iex>Talib.RSI.from_list!([10, 2, 30], 2)
%Talib.RSI{
period: 2,
values: [
0.0, 0.0, 87.5
]
}
iex>Talib.RSI.from_list([], 2)
{:error, :no_data}
"""
@spec from_list!([number], integer) ::
Talib.RSI.t()
| no_return
def from_list!(data, period \\ 14) do
case calculate(data, period) do
{:ok, result} -> result
{:error, :no_data} -> raise NoDataError
end
end
@doc false
@spec calculate([number], integer) ::
{:ok, Talib.RSI.t()}
| {:error, atom}
defp calculate(data, period) do
try do
%SMMA{values: avg_gain} = SMMA.from_list!(Utility.gain!(data), period)
%SMMA{values: avg_loss} = SMMA.from_list!(Utility.loss!(data), period)
avg_gain_loss = Enum.zip(avg_gain, avg_loss)
result =
for {average_gain, average_loss} <- avg_gain_loss,
!is_nil(average_gain) && !is_nil(average_loss) do
relative_strength =
case average_loss do
0.0 -> 0.0
0 -> 0.0
_ -> average_gain / average_loss
end
(100 - 100 / (relative_strength + 1)) |> Float.round(4)
end
{:ok,
%Talib.RSI{
period: period,
values: result
}}
rescue
NoDataError -> {:error, :no_data}
end
end
end
|
lib/talib/rsi.ex
| 0.875887
| 0.57821
|
rsi.ex
|
starcoder
|
defmodule Cldr.LanguageTag.Sigil do
@moduledoc """
Implements a `sigil_l/2` macro for
constructing `t:Cldr.LanguageTag` structs.
"""
@doc """
Handles sigil `~l` for language tags.
## Arguments
* `locale_name` is either a [BCP 47](https://unicode-org.github.io/cldr/ldml/tr35.html#Identifiers)
locale name as a string or
* `locale_name` | `backend` where backend is a backend module name
## Options
* `u` Will parse the locale but will not add
likely subtags, and it is not guaranteed that this
language tag is known to the backend module.
## Returns
* a `t:Cldr.LanguageTag` struct or
* raises an exception
## Examples
iex> import Cldr.LanguageTag.Sigil
iex> ~l(en-US-u-ca-gregory)
#Cldr.LanguageTag<en-US-u-ca-gregory [validated]>
iex> import Cldr.LanguageTag.Sigil
iex> ~l(en-US-u-ca-gregory|MyApp.Cldr)
#Cldr.LanguageTag<en-US-u-ca-gregory [validated]>
"""
defmacro sigil_l(locale_name, 'u') do
{:<<>>, [_], [locale_name]} = locale_name
case parse_locale(String.split(locale_name, "|")) do
{:ok, locale_name} ->
quote do
unquote(Macro.escape(locale_name))
end
{:error, {exception, reason}} ->
raise exception, reason
end
end
defmacro sigil_l(locale_name, _opts) do
{:<<>>, [_], [locale_name]} = locale_name
case validate_locale(String.split(locale_name, "|")) do
{:ok, locale_name} ->
quote do
unquote(Macro.escape(locale_name))
end
{:error, {exception, reason}} ->
raise exception, reason
end
end
defp validate_locale([locale_name, backend]) do
backend = Module.concat([backend])
Cldr.validate_locale(locale_name, backend)
end
defp validate_locale([locale_name]) do
Cldr.validate_locale(locale_name)
end
@opts [add_likely_subtags: false]
defp parse_locale([locale_name, backend]) do
backend = Module.concat([backend])
Cldr.Locale.canonical_language_tag(locale_name, backend, @opts)
end
defp parse_locale([locale_name]) do
Cldr.Locale.canonical_language_tag(locale_name, Cldr.default_backend!(), @opts)
end
end
|
lib/cldr/sigil.ex
| 0.780579
| 0.421522
|
sigil.ex
|
starcoder
|
defmodule Oli.Delivery.Evaluation.Parser do
@moduledoc """
A parser for evaluation rules.
The macros present here end up defining a single public function
`rule` in this module that takes a string input and attempts to parse
the following grammar:
<rule> :== <expression> {<or> <expression>}
<expression> :== <clause> {<and> <clause>}
<clause> :== <not> <clause> | "("<rule>")" | <criterion>
<criterion> :== <component> <operator> <value>
<component> :== "attemptNumber" | "input" | "length(input)"
<operator> :== "<" | ">" | "=" | "like" | "contains"
<value> :== { string }
<not> :== "!"
<and> :== "&&"
<or> :== "||"
"""
import NimbleParsec
not_ = string("!") |> replace(:!) |> label("!")
and_ = string("&&") |> optional(string(" ")) |> replace(:&&) |> label("&&")
or_ = string("||") |> optional(string(" ")) |> replace(:||) |> label("||")
lparen = ascii_char([?(]) |> label("(")
rparen = ascii_char([?)]) |> optional(string(" ")) |> label(")")
lbrace = ascii_char([?{]) |> label("{")
rbrace = ascii_char([?}]) |> optional(string(" ")) |> label("}")
op_lt = ascii_char([?<]) |> optional(string(" ")) |> replace(:lt) |> label("<")
op_gt = ascii_char([?>]) |> optional(string(" ")) |> replace(:gt) |> label(">")
op_eq = ascii_char([?=]) |> optional(string(" ")) |> replace(:eq) |> label("=")
op_like = string("like") |> optional(string(" ")) |> replace(:like) |> label("like")
op_contains =
string("contains") |> optional(string(" ")) |> replace(:contains) |> label("contains")
defcombinatorp(
:string_until_rbrace,
repeat(
lookahead_not(ascii_char([?}]))
|> utf8_char([])
)
|> reduce({List, :to_string, []})
)
defcombinatorp(:value, ignore(lbrace) |> parsec(:string_until_rbrace) |> ignore(rbrace))
# <component> :== "attemptNumber" | "input" | "length(input)"
attempt_number_ =
string("attemptNumber")
|> optional(string(" "))
|> replace(:attempt_number)
|> label("attemptNumber")
input_ = string("input") |> optional(string(" ")) |> replace(:input) |> label("input")
input_length_ =
string("length(input)")
|> optional(string(" "))
|> replace(:input_length)
|> label("input_length")
defcombinatorp(:component, choice([attempt_number_, input_, input_length_]))
# <operator> :== "<" | ">" | "=" | "like" | "contains"
defcombinatorp(:operator, choice([op_lt, op_gt, op_eq, op_like, op_contains]))
# <criterion> :== <component> <operator> <value>
defcombinatorp(
:criterion,
parsec(:component)
|> parsec(:operator)
|> parsec(:value)
|> reduce(:to_prefix_notation)
)
# <clause> :== <not> <clause> | "("<rule>")" | <criterion>
negation = not_ |> ignore |> parsec(:clause) |> tag(:!)
grouping = ignore(lparen) |> parsec(:rule) |> ignore(rparen)
criterion_ = parsec(:criterion)
defcombinatorp(:clause, choice([negation, grouping, criterion_]))
# <expression> :== <clause> {<and> <clause>}
defcombinatorp(
:expression,
parsec(:clause)
|> repeat(and_ |> parsec(:clause))
|> reduce(:to_prefix_notation)
)
# <rule> :== <expression> {<or> <expression>}
defparsec(
:rule,
parsec(:expression)
|> repeat(or_ |> parsec(:expression))
|> reduce(:to_prefix_notation)
)
defp to_prefix_notation(acc) do
case acc do
[lhs, op, rhs] -> {op, lhs, rhs}
[f, o] -> {:eval, f, o}
[!: [negated]] -> {:!, negated}
[item] -> item
end
end
end
|
lib/oli/delivery/evaluation/parser.ex
| 0.594551
| 0.515864
|
parser.ex
|
starcoder
|
defmodule Rir.Stat do
@moduledoc """
Functions to call endpoints on the [RIPEstat Data
API](https://stat.ripe.net/docs/02.data-api/).
## Them are the rules
These are the rules for the usage of the data API:
- no limit on the amount of requests
- but please register if you plan to regularly do more than 1000 requests/day
- see "Regular Usage" for details.
- the system limits the usage to 8 concurrent requests coming from one IP address
- RIPEstat [Service Terms and
Conditions](https://www.ripe.net/about-us/legal/ripestat-service-terms-and-conditions)
apply
"""
alias HTTPoison
@atoms %{
"error" => :error,
"info" => :info,
"warning" => :warning,
"ok" => :ok,
"supported" => :supported,
"deprecated" => :deprecated,
"maintenance" => :maintenance,
"development" => :development,
"valid" => :valid,
"invalid" => :invalid,
"invalid_asn" => :invalid_asn,
"unknown" => :unknown
}
HTTPoison.start()
# API
@doc """
Returns the url for the given the api endpoint `name` & the `params` (keyword list).
## Example
iex> url("announced-prefixes", resource: "1234")
"https://stat.ripe.net/data/announced-prefixes/data.json?resource=1234"
"""
@spec url(binary, Keyword.t()) :: binary
def url(name, params \\ []) do
params
|> Enum.map(fn {k, v} -> "#{k}=#{v}" end)
|> Enum.join("&")
|> then(fn params -> "https://stat.ripe.net/data/#{name}/data.json?#{params}" end)
end
@doc """
Returns a tuple with `call` details and either the endpoint `data` or an error `reason`.
The result is one of:
- `{:ok, {call, data}}`
- `{:error, {call, reason}}`
`call` is a map with call details:
```
%{
call: :supported | :deprecated | :development | :unknown
http: integer, # the http status code
info: nil | "some info msg"
name: "api endpoint name",
status: :ok | :error | :maintenance,
url: "api-endpoint called",
version: "major.minor"
}
```
The `call` meanings are:
- `:supported`, endpoint is meant to be stable and without bugs
- `:deprecated`, endpoint will cease to exist at some point in time
- `:development`, endpoint is a WIP and might change or disappear at any moment
- `:unknown`, endpoint is unknown (a locally defined status)
Strangely enough, when a non-existing endpoint is called, all `call` details
indicate success and `data` is an empty map. Only the `call.info` indicates
that the data call does not exist. Hence, `Rir.Stat.get/1` checks for
this condition and if true:
- sets `call` to `:unknown`
- sets `status` to `:error`
- removes the empty `data` map, and
- adds an `error` field, saying "unknown API endpoint"
`data` is a map whose contents depend on the api endpoint used.
`error` appears when there is some type of error:
- a parameter had an invalid value
- the endpoint does not exist
- the data could not be decoded
- the server had some internal error
- there were some network problems and no call was made
In the latter case, `{:error, {call, reason}}` is
returned, where the call details are limited to only these fields:
- `info: "error reason: <reason>"`
- `status: :error`
- `url: endpoint that could not be reached`
"""
@spec get(String.t(), Keyword.t()) :: {:ok, {map, map}} | {:error, {map, any}}
def get(url, opts \\ []) do
# get an url response and decode its data part
with {:ok, response} <- get_url(url, opts),
{:ok, body} <- decode_json(response.body),
{:ok, data} <- get_data(body),
{:ok, status} <- get_status(body),
{:ok, msgs} <- decode_messages(body) do
call = %{
call: to_atom(body["data_call_status"]),
http: response.status_code,
info: msgs[:info],
name: body["data_call_name"],
status: status,
url: url,
version: body["version"],
opts: opts
}
case status do
:error -> {:error, {call, msgs[:error]}}
_ -> {:ok, {call, data}}
end
|> sanity_check()
# return: {:ok, {call, data}} or {:error, {call, reason}}
else
{:error, reason} ->
  call = %{
    info: "error reason: #{reason}",
    status: :error,
    url: url,
    opts: opts
  }
  # match the @spec and the success path's return shape
  {:error, {call, "#{reason}"}}
end
end
@doc """
Returns an atom for a known first word in given `string`, otherwise just the
first word.
Note: the first word is also downcased.
## Examples
iex> to_atom("deprecated - 2022-12-31")
:deprecated
iex> to_atom("But don't you worry")
"but"
"""
@spec to_atom(String.t()) :: atom | String.t()
def to_atom(type) do
type =
type
|> String.downcase()
|> String.split()
|> List.first()
case @atoms[type] do
nil -> type
atom -> atom
end
end
# Helpers
@spec get_url(binary, Keyword.t()) :: {:ok, HTTPoison.Response.t()} | {:error, any}
defp get_url(url, opts) do
case HTTPoison.get(url, [], opts) do
{:ok, response} ->
{:ok, response}
{:error, %HTTPoison.Error{reason: :timeout}} ->
retry(url, opts)
{:error, error} ->
IO.inspect(error, label: :error)
{:error, error.reason}
end
end
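# Retries a timed-out request, doubling :recv_timeout on each attempt, until
# the :retry counter reaches zero.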
defp retry(url, opts) do
n = Keyword.get(opts, :retry, 0)
t = Keyword.get(opts, :recv_timeout, 5000) * 2
IO.inspect("[retry][timeout] #{url}, #{n} attempts left, recv_timeout: #{t} ms")
case n do
0 -> {:error, :timeout}
n -> get_url(url, Keyword.merge(opts, retry: n - 1, recv_timeout: t))
end
end
@spec decode_json(any) :: {:ok, term()} | {:error, :json_decode}
defp decode_json(body) do
case Jason.decode(body) do
{:ok, body} -> {:ok, body}
_ -> {:error, :json_decode}
end
end
@spec get_data(term()) :: {:ok, map} | {:error, :nodata}
defp get_data(body) do
case body["data"] do
map when is_map(map) -> {:ok, map}
_ -> {:error, :nodata}
end
end
@spec decode_messages(term()) :: {:ok, map}
defp decode_messages(body) do
msgs =
body["messages"]
|> Enum.map(fn [type, msg] -> {to_atom(type), msg} end)
|> Enum.into(%{})
{:ok, msgs}
rescue
_ -> {:ok, %{}}
end
@spec get_status(term()) :: {:ok, atom | binary} | {:error, atom}
defp get_status(body) do
# ok, error or maintenance
case body["status"] do
nil -> {:error, :nostatus}
status -> {:ok, to_atom(status)}
end
end
@spec sanity_check(tuple) :: {:ok, {map, map}} | {:error, {map, any}}
defp sanity_check({:ok, {call, data}} = result) when map_size(data) == 0 do
# correct the results when a non-existing API endpoint was called
if call.info && String.match?(call.info, ~r/data\s*call\s*does\s*not\s*exist/i) do
call =
call
|> Map.put(:call, :unknown)
|> Map.put(:status, :error)
{:error, {call, "unknown API endpoint"}}
else
result
end
end
defp sanity_check({:error, reason}),
do: {:error, reason}
defp sanity_check(result),
# ignore other conditions
do: result
end
|
lib/rir/stat.ex
| 0.859
| 0.820577
|
stat.ex
|
starcoder
|
defmodule Groupsort do
@moduledoc """
Elixir module to group students efficiently, maximizing the number of novel pairs
by looking at a given history, and minimizing the number of repeated historical
pairs.
WIP - current implementation is BRUTE FORCE. It gets the best solution, but explodes
for numbers greater than ~15 students.
"""
@doc """
Takes a history map and increments the count at the given pair key.
If no such historical count exists, a new count is started at 1.
## Examples
iex> h = %{{1, 2} => 1}
iex> Groupsort.add_pair(h, {1, 2})
%{{1, 2} => 2}
iex> Groupsort.add_pair(h, {2, 3})
%{{1, 2} => 1, {2, 3} => 1}
"""
def add_pair(history, pair) do
Map.put(history, pair, get_pairing_count(history, pair) + 1)
end
@doc """
Helper function for making sure pair fetching & creation is always consistently ordered.
Given two IDs, gives you the tuple of those IDs, ordered, for passing around as a pair.
## Examples
iex> Groupsort.make_pair(2, 3)
{2, 3}
iex> Groupsort.make_pair(3, 2)
{2, 3}
"""
def make_pair(student1, student2) do
Enum.min_max([student1, student2])
end
@doc """
Returns the historical pairing count for the pair of students given
or zero in the case that there is no entry for the pair in the history.
## Examples
iex> history = %{{1, 2} => 4, {1, 3} => 3, {2, 3} => 7}
iex> Groupsort.get_pairing_count(history, {1, 3})
3
iex> Groupsort.get_pairing_count(history, {1, 5})
0
"""
def get_pairing_count(history, pair) do
history[pair] || 0
end
@doc """
Takes a history and a group of IDs, and returns the sum of the historical
pairing count for each unique pair in the group
## Examples
iex> h = %{{1, 2} => 3, {2, 3} => 1, {1, 3} => 2}
iex> Groupsort.get_group_pair_count(h, [1, 2, 3])
6
iex> Groupsort.get_group_pair_count(h, [1, 2])
3
"""
def get_group_pair_count(history, group) do
group
|> combinations(2)
|> Enum.reduce(0, &(get_pairing_count(history, List.to_tuple(&1)) + &2))
end
def get_groupset_pair_count(history, groupset) do
groupset
|> Enum.reduce(0, &(get_group_pair_count(history, &1) + &2))
end
@doc """
Gives a list of the unique combinations of size n of a given list.
Courtesy -> https://stackoverflow.com/a/30587756
Probably makes sense to extract into its own module eventually.
## Examples
iex> Groupsort.combinations([1, 2, 3], 2)
[[1, 2], [1, 3], [2, 3]]
"""
def combinations(_, 0), do: [[]]
def combinations([], _), do: []
def combinations([x|xs], n) do
(for y <- combinations(xs, n - 1), do: [x|y]) ++ combinations(xs, n)
end
@doc """
Groupsort.sort is the primary method of this module. Given a history of student pairings,
a student_list (list of IDs), and a desired group_config, this function will return a
groupset (ie. a list of groups) matching the group_config, with the minimum number of
repeated pairs from the given history.
## Examples
iex> history = %{{1, 2} => 4, {1, 3} => 3, {1, 4} => 0, {2, 3} => 0, {2, 4} => 2, {3, 4} => 1}
iex> student_list = [1, 2, 3, 4]
iex> Groupsort.sort(history, student_list, [2, 2])
[[2, 3], [1, 4]]
In the above example, we have 4 students, and we want to create a groupset that consists
of two groups of two (the sum of our group_config values must naturally total the count
of students).
From our history, we can see that students 1 and 4 have never been paired together. Likewise,
students 2 and 3 have never been paired. Every other possible pair has some non-zero count.
Therefore, our ideal final pairing is 1 and 4, and 2 and 3. Two groups of two, minimizing the
pairing overlap.
"""
def sort(history, student_list, group_config, groupset \\ [])
def sort(_, student_list, [_|[]], groupset), do: [student_list | groupset]
def sort(history, student_list, [group_size | group_config], groupset) do
student_list
|> combinations(group_size)
|> Stream.map(&(sort(history, student_list -- &1, group_config, [&1 | groupset])))
|> Enum.min_by(&(get_groupset_pair_count(history, &1)))
end
end
|
lib/groupsort.ex
| 0.870748
| 0.718249
|
groupsort.ex
|
starcoder
|
defmodule Clova do
@moduledoc """
A behaviour for Clova extensions.
An implementation of this behaviour will be called by the `Clova.DispatcherPlug`.
Each callback is called with a map representing the decoded clova request, and a struct representing the
clova response. Helpers from the `Clova.Request` and `Clova.Response` modules for reading the request
and manipulating the response are imported by default. The callbacks should return the completed response
struct, which will be added to the `Plug.Conn` struct by `Clova.DispatcherPlug`.
"""
alias Clova.{Request, Response}
@doc """
Called when a `LaunchRequest` is received.
The `response` parameter is an empty response which can be used with the functions
in `Clova.Response` in order to produce a completed response to return.
"""
@callback handle_launch(
request :: map(),
response :: Response.t()
) :: Response.t()
@doc """
Called when an `IntentRequest` is received.
The name of the intent is extracted and passed as the `name` argument,
to allow for easier pattern matching. `Clova.Request.get_slot/2` can be used to retrieve
the slot data for an intent.
The `response` parameter is an empty response which can be used with the functions
in `Clova.Response` in order to produce a completed response to return.
"""
@callback handle_intent(
name :: String.t(),
request :: map(),
response :: Response.t()
) :: Response.t()
@doc """
Called when a `SessionEndedRequest` is received.
The `response` parameter is an empty response which can be used with the functions
in `Clova.Response` in order to produce a completed response to return. At the time of writing
any response to a `SessionEndedRequest` is ignored by the server.
"""
@callback handle_session_ended(
request :: map(),
response :: Response.t()
) :: Response.t()
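# `use Clova` provides default pass-through implementations of all three
# callbacks; an extension overrides only the ones it needs.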
defmacro __using__(_) do
quote do
@behaviour Clova
import Clova.{Request, Response}
def handle_launch(_request, response), do: response
def handle_intent(_name, _request, response), do: response
def handle_session_ended(_request, response), do: response
defoverridable Clova
end
end
end
|
lib/clova.ex
| 0.890211
| 0.639237
|
clova.ex
|
starcoder
|
defmodule Unzip do
@moduledoc """
Module to get files out of a zip. Works with local and remote files
## Overview
Unzip tries to solve the problem of accessing files in a zip that is not local (AWS S3, SFTP, etc.). It does this by separating the file system from the zip implementation. Anything that implements `Unzip.FileAccess` can be used to read zip contents. Unzip relies on the ability to seek into and read parts of the file, which the nature of the zip format requires. Files in the zip are read on demand.
## Usage
# Unzip.LocalFile implements Unzip.FileAccess
zip_file = Unzip.LocalFile.open("foo/bar.zip")
# `new` reads list of files by reading central directory found at the end of the zip
{:ok, unzip} = Unzip.new(zip_file)
# presents already read files metadata
file_entries = Unzip.list_entries(unzip)
# returns decompressed file stream
stream = Unzip.file_stream!(unzip, "baz.png")
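# one possible way to consume the stream, writing the file to disk
stream |> Stream.into(File.stream!("baz.png")) |> Stream.run()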
Supports STORED and DEFLATE compression methods, and parses zip64 records for large archives
"""
require Logger
alias Unzip.FileAccess
alias Unzip.FileBuffer
alias Unzip.RangeTree
use Bitwise, only_operators: true
@chunk_size 65_000
defstruct [:zip, :cd_list]
defmodule Error do
defexception [:message]
end
defmodule Entry do
@moduledoc """
File metadata returned by `Unzip.list_entries/1`
* `:file_name` - (string) File name with complete path. Directory files will have `/` at the end of their name
* `:last_modified_datetime` - (NaiveDateTime) last modified date and time of the file
* `:compressed_size` - (positive integer) Compressed file size in bytes
* `:uncompressed_size` - (positive integer) Uncompressed file size in bytes
"""
defstruct [
:file_name,
:last_modified_datetime,
:compressed_size,
:uncompressed_size
]
end
@doc """
Creates Unzip struct by reading central directory found at the end of the zip (reads entries in the file)
"""
def new(zip) do
with {:ok, eocd} <- find_eocd(zip),
{:ok, entries} <- read_cd_entries(zip, eocd) do
{:ok, %Unzip{zip: zip, cd_list: entries}}
end
end
@doc """
Returns a list of file metadata. This does not make a `pread` call, as the metadata was already read by `new/1`.
See `Unzip.Entry` for the metadata fields
"""
def list_entries(unzip) do
Enum.map(unzip.cd_list, fn {_, entry} ->
%Entry{
file_name: entry.file_name,
last_modified_datetime: entry.last_modified_datetime,
compressed_size: entry.compressed_size,
uncompressed_size: entry.uncompressed_size
}
end)
end
@doc """
Returns the decompressed file entry from the zip as a stream. `file_name` *must* be the complete file path. The file is read in chunks of 65k
"""
def file_stream!(%Unzip{zip: zip, cd_list: cd_list}, file_name) do
unless Map.has_key?(cd_list, file_name) do
raise Error, message: "File #{inspect(file_name)} not present in the zip"
end
entry = Map.fetch!(cd_list, file_name)
local_header = pread!(zip, entry.local_header_offset, 30)
<<0x04034B50::little-32, _::little-32, compression_method::little-16, _::little-128,
file_name_length::little-16, extra_field_length::little-16>> = local_header
offset = entry.local_header_offset + 30 + file_name_length + extra_field_length
stream!(zip, offset, entry.compressed_size)
|> decompress(compression_method)
|> crc_check(entry.crc)
end
defp stream!(file, offset, size) do
end_offset = offset + size
Stream.unfold(offset, fn
offset when offset >= end_offset ->
nil
offset ->
next_offset = min(offset + @chunk_size, end_offset)
data = pread!(file, offset, next_offset - offset)
{data, next_offset}
end)
end
defp decompress(stream, 0x8) do
  # Raw DEFLATE data (window bits -15, no zlib header), inflated chunk by chunk.
  # :zlib.inflate/2 buffers partial input internally, so compressed chunks can
  # be fed in as they arrive and each call yields an iolist of inflated data.
  stream
  |> Stream.transform(
    fn ->
      z = :zlib.open()
      :ok = :zlib.inflateInit(z, -15)
      z
    end,
    fn data, z -> {[:zlib.inflate(z, data)], z} end,
    fn z ->
      :zlib.inflateEnd(z)
      :zlib.close(z)
    end
  )
end
defp decompress(stream, 0x0), do: stream
defp decompress(_stream, compression_method),
do: raise(Error, message: "Compression method #{compression_method} is not supported")
defp crc_check(stream, expected_crc) do
stream
|> Stream.transform(
fn -> :erlang.crc32(<<>>) end,
fn data, crc -> {[data], :erlang.crc32(crc, data)} end,
fn crc ->
unless crc == expected_crc do
raise Error, message: "CRC mismatch. expected: #{expected_crc} got: #{crc}"
end
end
)
end
defp read_cd_entries(zip, eocd) do
with {:ok, file_buffer} <-
FileBuffer.new(
zip,
@chunk_size,
eocd.cd_offset + eocd.cd_size,
eocd.cd_offset,
:forward
) do
parse_cd(file_buffer, %{entries: %{}, range_tree: RangeTree.new()})
end
end
defp parse_cd(%FileBuffer{buffer_position: pos, limit: limit}, %{entries: entries})
when pos >= limit,
do: {:ok, entries}
defp parse_cd(buffer, acc) do
with {:ok, chunk, buffer} <- FileBuffer.next_chunk(buffer, 46),
<<0x02014B50::little-32, _::little-32, flag::little-16, compression_method::little-16,
mtime::little-16, mdate::little-16, crc::little-32, compressed_size::little-32,
uncompressed_size::little-32, file_name_length::little-16,
extra_field_length::little-16, comment_length::little-16, _::little-64,
local_header_offset::little-32>> <- chunk,
{:ok, buffer} <- FileBuffer.move_forward_by(buffer, 46),
{:ok, file_name, buffer} <- FileBuffer.next_chunk(buffer, file_name_length),
{:ok, buffer} <- FileBuffer.move_forward_by(buffer, file_name_length),
{:ok, extra_fields, buffer} <- FileBuffer.next_chunk(buffer, extra_field_length),
{:ok, buffer} <- FileBuffer.move_forward_by(buffer, extra_field_length),
{:ok, _file_comment, buffer} <- FileBuffer.next_chunk(buffer, comment_length),
{:ok, buffer} <- FileBuffer.move_forward_by(buffer, comment_length) do
entry = %{
bit_flag: flag,
compression_method: compression_method,
last_modified_datetime: to_datetime(<<mdate::16>>, <<mtime::16>>),
crc: crc,
compressed_size: compressed_size,
uncompressed_size: uncompressed_size,
local_header_offset: local_header_offset,
# TODO: we should treat binary as "IBM Code Page 437" encoded string if GP flag 11 is not set
file_name: file_name
}
entry =
if need_zip64_extra?(entry) do
merge_zip64_extra(entry, extra_fields)
else
entry
end
case add_entry(acc, file_name, entry) do
{:error, _} = error -> error
acc -> parse_cd(buffer, acc)
end
else
  {:error, :invalid_count} -> {:error, "Invalid zip file, invalid central directory"}
  error -> error
end
end
defp add_entry(%{entries: entries, range_tree: range_tree}, file_name, entry) do
if RangeTree.overlap?(range_tree, entry.local_header_offset, entry.compressed_size) do
{:error, "Invalid zip file, found overlapping zip entries"}
else
%{
entries: Map.put(entries, file_name, entry),
range_tree: RangeTree.insert(range_tree, entry.local_header_offset, entry.compressed_size)
}
end
end
defp need_zip64_extra?(%{
compressed_size: cs,
uncompressed_size: ucs,
local_header_offset: offset
}) do
Enum.any?([cs, ucs, offset], &(&1 == 0xFFFFFFFF))
end
@zip64_extra_field_id 0x0001
defp merge_zip64_extra(entry, extra) do
zip64_extra =
find_extra_fields(extra)
|> Map.fetch!(@zip64_extra_field_id)
{entry, zip64_extra} =
if entry[:uncompressed_size] == 0xFFFFFFFF do
<<uncompressed_size::little-64, zip64_extra::binary>> = zip64_extra
{%{entry | uncompressed_size: uncompressed_size}, zip64_extra}
else
{entry, zip64_extra}
end
{entry, zip64_extra} =
if entry[:compressed_size] == 0xFFFFFFFF do
<<compressed_size::little-64, zip64_extra::binary>> = zip64_extra
{%{entry | compressed_size: compressed_size}, zip64_extra}
else
{entry, zip64_extra}
end
{entry, _zip64_extra} =
if entry[:local_header_offset] == 0xFFFFFFFF do
<<local_header_offset::little-64, zip64_extra::binary>> = zip64_extra
{%{entry | local_header_offset: local_header_offset}, zip64_extra}
else
{entry, zip64_extra}
end
entry
end
defp find_extra_fields(extra, result \\ %{})
defp find_extra_fields(<<>>, result), do: result
defp find_extra_fields(
<<id::little-16, size::little-16, data::binary-size(size), rest::binary>>,
result
) do
find_extra_fields(rest, Map.put(result, id, data))
end
defp find_eocd(zip) do
with {:ok, file_buffer} <- FileBuffer.new(zip, @chunk_size),
{:ok, eocd, file_buffer} <- find_eocd(file_buffer, 0) do
case find_zip64_eocd(file_buffer) do
{:ok, zip64_eocd} ->
{:ok, zip64_eocd}
_ ->
{:ok, eocd}
end
end
end
@zip64_eocd_locator_size 20
@zip64_eocd_size 56
defp find_zip64_eocd(file_buffer) do
with {:ok, chunk, file_buffer} <-
FileBuffer.next_chunk(file_buffer, @zip64_eocd_locator_size),
true <- zip64?(chunk) do
<<0x07064B50::little-32, _::little-32, eocd_offset::little-64, _::little-32>> = chunk
{:ok,
<<0x06064B50::little-32, _::64, _::16, _::16, _::32, _::32, _::64,
total_entries::little-64, cd_size::little-64,
cd_offset::little-64>>} = pread(file_buffer.file, eocd_offset, @zip64_eocd_size)
{:ok, %{total_entries: total_entries, cd_size: cd_size, cd_offset: cd_offset}}
else
_ ->
false
end
end
defp zip64?(<<0x07064B50::little-32, _::little-128>>), do: true
defp zip64?(_), do: false
# Spec has variable length comment at the end of zip after EOCD, so
# EOCD can anywhere in the zip file. To avoid exhaustive search, we
# limit search space to last 5Mb. If we don't find EOCD within that
# we assume it's an invalid zip
@eocd_search_limit 5 * 1024 * 1024
defp find_eocd(_file_buffer, consumed) when consumed > @eocd_search_limit,
do: {:error, "Invalid zip file, missing EOCD record"}
@eocd_header_size 22
defp find_eocd(file_buffer, consumed) do
with {:ok, chunk, file_buffer} <- FileBuffer.next_chunk(file_buffer, @eocd_header_size) do
case chunk do
<<0x06054B50::little-32, _ignore::little-48, total_entries::little-16, cd_size::little-32,
cd_offset::little-32, ^consumed::little-16>> ->
{:ok, buffer} = FileBuffer.move_backward_by(file_buffer, @eocd_header_size)
{:ok, %{total_entries: total_entries, cd_size: cd_size, cd_offset: cd_offset}, buffer}
chunk when byte_size(chunk) < @eocd_header_size ->
{:error, "Invalid zip file, missing EOCD record"}
_ ->
{:ok, buffer} = FileBuffer.move_backward_by(file_buffer, 1)
find_eocd(buffer, consumed + 1)
end
end
end
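# MS-DOS date/time encoding: date is 7 bits of years-since-1980, a 4-bit month
# and a 5-bit day; time is a 5-bit hour, 6-bit minute and 5-bit seconds/2.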
defp to_datetime(<<year::7, month::4, day::5>> = date, <<hour::5, minute::6, second::5>> = time) do
case NaiveDateTime.new(1980 + year, month, day, hour, minute, second * 2) do
{:ok, datetime} ->
datetime
_ ->
Logger.warn(
"[unzip] invalid datetime. date: #{inspect_binary(date)} time: #{inspect_binary(time)}"
)
nil
end
end
defp pread!(file, offset, length) do
case pread(file, offset, length) do
{:ok, term} -> term
{:error, reason} when is_binary(reason) -> raise Error, message: reason
{:error, reason} -> raise Error, message: inspect(reason)
end
end
defp pread(file, offset, length) do
case FileAccess.pread(file, offset, length) do
{:ok, term} when is_binary(term) ->
{:ok, term}
{:error, reason} ->
{:error, reason}
_ ->
{:error, "Invalid data returned by pread/3. Expected binary"}
end
end
defp inspect_binary(binary), do: inspect(binary, binaries: :as_binaries, base: :hex)
end
|
lib/unzip.ex
| 0.792865
| 0.611962
|
unzip.ex
|
starcoder
|
defmodule Clova.DispatcherPlug do
import Plug.Conn
@behaviour Plug
@moduledoc """
A plug for dispatching CEK request to your `Clova` implementation.
For simple skills, `Clova.SkillPlug` provides a wrapper of this and related plugs.
Pass your callback module as the `dispatch_to` argument to the plug.
This plug expects the request to have been parsed by `Plug.Parsers`, and validated by `Clova.ValidatorPlug`.
The `Clova.Response` struct returned from your `Clova` implementation is placed into the `:clova_response`
assign. To encode it to JSON, the `Clova.EncoderPlug` can be used.
If you do not use `Clova.EncoderPlug`, you need to encode and set the `Plug.Conn`'s response body yourself:
```
plug Plug.Parsers,
parsers: [:json],
json_decoder: Poison,
body_reader: Clova.CachingBodyReader.spec()
plug Clova.ValidatorPlug, app_id: "com.example.my_extension"
plug Clova.DispatcherPlug, dispatch_to: MyExtension
plug :match
plug :dispatch
post "/endpoint" do
conn
|> put_resp_content_type("application/json")
|> send_resp(conn.status, Poison.encode!(conn.assigns.clova_response))
end
```
"""
def init(opts) do
with {:ok, module} when is_atom(module) <- Keyword.fetch(opts, :dispatch_to) do
Enum.into(opts, %{})
else
{:ok, module} ->
raise ArgumentError,
message: ":dispatch_to option must be a module name atom, got: #{inspect module}"
:error ->
raise ArgumentError, message: "Must supply dispatch module as :dispatch_to argument"
end
end
def call(%Plug.Conn{body_params: request} = conn, %{dispatch_to: handler}) do
response = dispatch(request, handler)
message =
"Clova.Dispatcher: response placed in :clova_response Plug.Conn assign. " <>
"Encode response to JSON before sending (see Clova.Encoder plug)."
conn
|> assign(:clova_response, response)
|> resp(conn.status || :ok, message)
end
defp dispatch(%{"request" => %{"type" => "LaunchRequest"}} = request, handler) do
handler.handle_launch(request, %Clova.Response{})
end
defp dispatch(
%{"request" => %{"type" => "IntentRequest", "intent" => %{"name" => name}}} = request,
handler
) do
handler.handle_intent(name, request, %Clova.Response{})
end
defp dispatch(%{"request" => %{"type" => "SessionEndedRequest"}} = request, handler) do
handler.handle_session_ended(request, %Clova.Response{})
end
end
|
lib/clova/dispatcher_plug.ex
| 0.784113
| 0.728845
|
dispatcher_plug.ex
|
starcoder
|
defmodule Zaryn.Crypto.ID do
@moduledoc false
alias Zaryn.Crypto
@doc """
Get an identification from an elliptic curve name
## Examples
iex> ID.from_curve(:ed25519)
0
iex> ID.from_curve(:secp256r1)
1
"""
@spec from_curve(Crypto.supported_curve()) :: integer()
def from_curve(:ed25519), do: 0
def from_curve(:secp256r1), do: 1
def from_curve(:secp256k1), do: 2
@doc """
Get a curve name from an curve ID
## Examples
iex> ID.to_curve(0)
:ed25519
iex> ID.to_curve(1)
:secp256r1
"""
@spec to_curve(integer()) :: Crypto.supported_curve()
def to_curve(0), do: :ed25519
def to_curve(1), do: :secp256r1
def to_curve(2), do: :secp256k1
@doc """
Get an identification from an hash algorithm
## Examples
iex> ID.from_hash(:sha256)
0
iex> ID.from_hash(:blake2b)
4
"""
@spec from_hash(Crypto.supported_hash()) :: integer()
def from_hash(:sha256), do: 0
def from_hash(:sha512), do: 1
def from_hash(:sha3_256), do: 2
def from_hash(:sha3_512), do: 3
def from_hash(:blake2b), do: 4
@doc """
Get an identification from a key origin
## Examples
iex> ID.from_origin(:software)
0
iex> ID.from_origin(:tpm)
1
"""
@spec from_origin(Crypto.supported_origin()) :: integer()
def from_origin(:software), do: 0
def from_origin(:tpm), do: 1
@doc """
Get an origin from an identification
"""
@spec to_origin(integer()) :: Crypto.supported_origin()
def to_origin(0), do: :software
def to_origin(1), do: :tpm
@doc """
Prepend hash by the algorithm identification byte
## Examples
iex> ID.prepend_hash(<<67, 114, 249, 17, 148, 8, 100, 233, 130, 249, 233, 179, 216, 18, 36, 222, 187,
...> 161, 212, 202, 143, 54, 45, 141, 99, 144, 171, 133, 137, 173, 211, 126>>, :sha256)
<<0, 67, 114, 249, 17, 148, 8, 100, 233, 130, 249, 233, 179, 216, 18, 36, 222, 187,
161, 212, 202, 143, 54, 45, 141, 99, 144, 171, 133, 137, 173, 211, 126>>
"""
@spec prepend_hash(binary(), Crypto.supported_hash()) :: <<_::8, _::_*8>>
def prepend_hash(hash, algorithm) do
<<from_hash(algorithm)::8, hash::binary>>
end
@doc """
Prepend each keys by the identifying curve and the origin
## Examples
iex> ID.prepend_keypair({
...> <<38, 59, 8, 1, 172, 20, 74, 63, 15, 72, 206, 129, 140, 212, 188, 102, 203, 51,
...> 188, 207, 135, 134, 211, 3, 87, 148, 178, 162, 118, 208, 109, 96>>,
...> <<21, 150, 237, 25, 119, 159, 16, 128, 43, 48, 169, 243, 214, 246, 102, 147,
...> 172, 79, 60, 159, 89, 230, 31, 254, 187, 176, 70, 166, 119, 96, 87, 194>>
...> }, :ed25519)
{
<<0, 0, 38, 59, 8, 1, 172, 20, 74, 63, 15, 72, 206, 129, 140, 212, 188, 102, 203, 51,
188, 207, 135, 134, 211, 3, 87, 148, 178, 162, 118, 208, 109, 96>>,
<<0, 0, 21, 150, 237, 25, 119, 159, 16, 128, 43, 48, 169, 243, 214, 246, 102, 147,
172, 79, 60, 159, 89, 230, 31, 254, 187, 176, 70, 166, 119, 96, 87, 194>>
}
"""
@spec prepend_keypair(
{binary(), binary()},
Crypto.supported_curve(),
Crypto.supported_origin()
) ::
{Crypto.key(), Crypto.key()}
def prepend_keypair({public_key, private_key}, curve, origin \\ :software) do
{prepend_key(public_key, curve, origin), prepend_key(private_key, curve, origin)}
end
@doc """
Prepend key by identifying the curve and the origin
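## Examples
    iex> ID.prepend_key(<<1, 2, 3>>, :ed25519)
    <<0, 0, 1, 2, 3>>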
"""
@spec prepend_key(binary(), Crypto.supported_curve(), Crypto.supported_origin()) :: Crypto.key()
def prepend_key(key, curve, origin \\ :software) do
curve_id = from_curve(curve)
origin_id = from_origin(origin)
<<curve_id::8, origin_id::8, key::binary>>
end
end
|
lib/zaryn/crypto/id.ex
| 0.907224
| 0.414691
|
id.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.NodeRemoveStatus do
@moduledoc """
Z-Wave command for NODE_REMOVE_STATUS
This command is useful to respond to a `Grizzly.ZWave.Commands.NodeRemove`
command.
Params:
* `:seq_number` - the sequence number from the original node remove command
* `:status` - the status of the result of the node removal
* `:node_id` - the node id of the removed node
* `:command_class_version` - explicitly set the command class version used
to encode the command (optional - defaults to NetworkManagementInclusion v4)
When encoding the params you can encode for a specific command class version
by passing the `:command_class_version` to the encode options
```elixir
Grizzly.ZWave.Commands.NodeRemoveStatus.encode_params(node_remove_status, command_class_version: 3)
```
If there is no command class version specified this will encode to version 4 of the
`NetworkManagementInclusion` command class. This version supports the use of 16 bit node
ids.
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError, NodeId}
alias Grizzly.ZWave.CommandClasses.NetworkManagementInclusion
@type status() :: :done | :failed
@impl Grizzly.ZWave.Command
def new(params \\ []) do
# TODO validate params
command = %Command{
name: :node_remove_status,
command_byte: 0x04,
command_class: NetworkManagementInclusion,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl Grizzly.ZWave.Command
def encode_params(command, opts \\ []) do
seq_number = Command.param!(command, :seq_number)
status = Command.param!(command, :status)
node_id = Command.param!(command, :node_id)
case Keyword.get(opts, :command_class_version, 4) do
4 ->
<<seq_number, encode_status(status), NodeId.encode_extended(node_id)::binary>>
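# versions prior to v4 only support 8-bit node ids, so the node id is
# encoded as a single byte below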
n when n < 4 ->
<<seq_number, encode_status(status), node_id>>
end
end
@impl Grizzly.ZWave.Command
@spec decode_params(binary()) :: {:ok, keyword()} | {:error, DecodeError.t()}
def decode_params(<<seq_number, status_byte, node_id::binary>>) do
case decode_status(status_byte) do
{:ok, status} ->
{:ok,
[
seq_number: seq_number,
status: status,
node_id: NodeId.parse(node_id)
]}
{:error, %DecodeError{}} = error ->
error
end
end
@spec encode_status(status()) :: 0x06 | 0x07
def encode_status(:done), do: 0x06
def encode_status(:failed), do: 0x07
@spec decode_status(byte()) :: {:ok, status()} | {:error, DecodeError.t()}
def decode_status(0x06), do: {:ok, :done}
def decode_status(0x07), do: {:ok, :failed}
def decode_status(byte),
do: {:error, %DecodeError{value: byte, param: :status, command: :node_remove_status}}
end
# source file: lib/grizzly/zwave/commands/node_remove_status.ex
defmodule Tune.Demands do
@moduledoc """
The Tune.Demands context.
"""
import Ecto.Query, warn: false
alias Tune.Repo
alias Tune.Demands.OnlineConcertDemand
@doc """
Returns the list of online_concert_demands for the given Spotify username, most recently updated first.
## Examples
iex> list_online_concert_demands(spotify_username)
[%OnlineConcertDemand{}, ...]
"""
def list_online_concert_demands(spotify_username) do
Repo.all(from(demand in OnlineConcertDemand, where: demand.user_id == ^spotify_username,
order_by: [desc: demand.updated_at]))
end
@doc """
Gets a single online_concert_demand.
Raises `Ecto.NoResultsError` if the Online concert demand does not exist.
## Examples
iex> get_online_concert_demand!(123)
%OnlineConcertDemand{}
iex> get_online_concert_demand!(456)
** (Ecto.NoResultsError)
"""
def get_online_concert_demand!(id), do: Repo.get!(OnlineConcertDemand, id)
@doc """
Creates an online_concert_demand.
## Examples
iex> create_online_concert_demand(%{field: value})
{:ok, %OnlineConcertDemand{}}
iex> create_online_concert_demand(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_online_concert_demand(attrs \\ %{}) do
%OnlineConcertDemand{}
|> OnlineConcertDemand.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates an online_concert_demand.
## Examples
iex> update_online_concert_demand(online_concert_demand, %{field: new_value})
{:ok, %OnlineConcertDemand{}}
iex> update_online_concert_demand(online_concert_demand, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_online_concert_demand(%OnlineConcertDemand{} = online_concert_demand, attrs) do
online_concert_demand
|> OnlineConcertDemand.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes an online_concert_demand.
## Examples
iex> delete_online_concert_demand(online_concert_demand)
{:ok, %OnlineConcertDemand{}}
iex> delete_online_concert_demand(online_concert_demand)
{:error, %Ecto.Changeset{}}
"""
def delete_online_concert_demand(%OnlineConcertDemand{} = online_concert_demand) do
Repo.delete(online_concert_demand)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking online_concert_demand changes.
## Examples
iex> change_online_concert_demand(online_concert_demand)
%Ecto.Changeset{data: %OnlineConcertDemand{}}
"""
def change_online_concert_demand(%OnlineConcertDemand{} = online_concert_demand, attrs \\ %{}) do
OnlineConcertDemand.changeset(online_concert_demand, attrs)
end
end
# source file: lib/tune/tune/demands.ex
defmodule EspEx.StreamName do
@moduledoc """
A StreamName is a module to manage the location where events are written.
Think of stream names as a URL for where your events are located.
The StreamName struct provides an easy way to access the data that otherwise
would be in a String, which would always require validation and take more
time to extract the relevant information.
Stream names are **camelCased**.
Sometimes we refer to "Streams" but we actually mean "Stream names".
A full stream name might look like: `campaign:command+position-123`.
- `campaign` is the stream name **category**
- category is required
- `command` and `position` are the stream **types**
- `123` is the stream `identifier` (a string, usually a UUID)
- identifier is optional
- If the stream name has no `identifier`, the dash must be omitted
- Any dash after the first dash is considered part of the identifier
- If the stream has no types, `:` must be omitted
- Types must be separated by the `+` sign and must always be sorted
- types are optional
The struct coming out of `from_string` should look like:
%StreamName{category: "campaign", identifier: "123",
types: :ordsets.from_list(["command", "position"])}
The function `to_string` should convert it back to
`campaign:command+position-123`
"""
@type id :: String.t() | nil
@type t :: %EspEx.StreamName{
category: String.t(),
identifier: id(),
types: list(String.t())
}
@enforce_keys [:category]
defstruct(category: "", identifier: nil, types: :ordsets.new())
def empty, do: %__MODULE__{category: ""}
@doc """
Creates a new StreamName struct.
## Examples
iex> EspEx.StreamName.new("campaign", "123", ["command", "position"])
%EspEx.StreamName{category: "campaign",
identifier: "123",
types: :ordsets.from_list(["command", "position"])}
"""
def new(category), do: new(category, nil, [])
def new(category, identifier), do: new(category, identifier, [])
def new("", _, _), do: raise(ArgumentError, message: "category is blank")
@spec new(
category :: String.t(),
identifier :: String.t() | nil,
types :: list(String.t())
) :: EspEx.StreamName.t()
def new(category, identifier, types)
when is_bitstring(category) and
(is_nil(identifier) or is_bitstring(identifier)) and is_list(types) do
category = String.trim(category)
category_empty!(category)
identifier = trim_or_nil(identifier)
types = Enum.map(types, &String.trim/1)
%__MODULE__{
category: category,
identifier: identifier,
types: :ordsets.from_list(types)
}
end
defp trim_or_nil(nil), do: nil
defp trim_or_nil(identifier), do: String.trim(identifier)
@doc """
Creates a StreamName struct from a provided string argument.
## Examples
iex> EspEx.StreamName.from_string("campaign:command+position-123")
%EspEx.StreamName{category: "campaign",
identifier: "123",
types: :ordsets.from_list(["command", "position"])}
"""
@spec from_string(text :: String.t()) :: EspEx.StreamName.t()
def from_string(text) when is_bitstring(text) do
category = extract_category(text)
category_empty!(category)
identifier = extract_identifier(text)
types = extract_types(text, category, identifier)
new(category, identifier, types)
end
defp category_empty!(""), do: raise(ArgumentError, "Category is blank")
defp category_empty!(_), do: nil
defp extract_category(string) do
String.split(string, ":")
|> List.first()
|> String.split("-")
|> List.first()
|> String.trim()
end
defp extract_identifier(string) do
identifier = Regex.run(~r/-(.+)/, string)
if identifier == nil do
nil
else
List.last(identifier)
end
end
defp extract_types(string, category, identifier) do
types =
string
|> String.trim_leading(category)
|> String.trim_leading(":")
|> String.trim_trailing("-#{identifier}")
|> String.trim_trailing("+")
|> String.split("+")
if types == [""] do
:ordsets.new()
else
:ordsets.from_list(types)
end
end
defimpl String.Chars do
@doc """
Returns a string when provided with a StreamName struct.
## Examples
iex> stream_name = %EspEx.StreamName{category: "campaign", identifier: "123", types: :ordsets.from_list(["command", "position"])}
iex> to_string(stream_name)
"campaign:command+position-123"
"""
@spec to_string(stream_name :: EspEx.StreamName.t()) :: String.t()
def to_string(%EspEx.StreamName{
category: category,
identifier: identifier,
types: types
}) do
identifier = identifier_to_string(identifier)
types = types_to_string(types)
"#{category}#{types}#{identifier}"
end
defp identifier_to_string(nil), do: ""
defp identifier_to_string(identifier), do: "-#{identifier}"
defp types_to_string([]), do: ""
defp types_to_string(types), do: ":#{Enum.join(types, "+")}"
end
@doc """
Returns `true` if provided list is in provided stream_name's types.
## Examples
iex> stream_name = %EspEx.StreamName{category: "campaign", identifier: nil, types: :ordsets.from_list(["command", "position"])}
iex> list = ["command", "position"]
iex> EspEx.StreamName.has_all_types(stream_name, list)
true
"""
@spec has_all_types(
stream_name :: EspEx.StreamName.t(),
list :: list(String.t())
) :: boolean()
def has_all_types(%__MODULE__{types: types}, list) do
list
|> :ordsets.from_list()
|> :ordsets.is_subset(types)
end
@doc """
Returns `true` if StreamName struct has no identifier, but has a types.
Returns `false` if StreamName struct has an identifier.
## Examples
iex> stream_name = %EspEx.StreamName{category: "campaign", identifier: "123", types: :ordsets.from_list(["command", "position"])}
iex> EspEx.StreamName.category?(stream_name)
false
"""
@spec category?(stream_name :: EspEx.StreamName.t()) :: boolean()
def category?(%__MODULE__{identifier: nil}), do: true
def category?(%__MODULE__{}), do: false
@doc """
Returns a string of the StreamName with the position appended to the end.
## Examples
iex> stream_name = %EspEx.StreamName{category: "campaign", identifier: "123", types: :ordsets.from_list(["command", "position"])}
iex> EspEx.StreamName.position_identifier(stream_name, 1)
"campaign:command+position-123/1"
"""
@spec position_identifier(
stream_name :: EspEx.StreamName.t(),
position :: non_neg_integer() | nil
) :: String.t()
def position_identifier(%__MODULE__{} = stream_name, nil) do
to_string(stream_name)
end
def position_identifier(%__MODULE__{} = stream_name, position)
when is_integer(position) and position >= 0 do
to_string(stream_name) <> "/#{position}"
end
@spec subset?(
stream_name :: EspEx.StreamName.t(),
other_stream :: EspEx.StreamName.t()
) :: boolean
def subset?(%__MODULE__{} = stream_name, %__MODULE__{} = other_stream) do
case category?(other_stream) do
true ->
stream_name.category == other_stream.category &&
stream_name.types == other_stream.types
false ->
stream_name == other_stream
end
end
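# Sketch: a category stream (no identifier) matches any stream that shares
# its category and types:
#
#   category = EspEx.StreamName.new("campaign", nil, ["command"])
#   stream = EspEx.StreamName.new("campaign", "123", ["command"])
#   EspEx.StreamName.subset?(stream, category)
#   #=> true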
end
# source file: lib/esp_ex/stream_name.ex
defmodule ChallengeGov.Analytics do
@moduledoc """
Analytics context
"""
@behaviour Stein.Filter
import Ecto.Query
alias ChallengeGov.Challenges.Challenge
alias ChallengeGov.Repo
alias Stein.Filter
def get_challenges(opts \\ []) do
Challenge
|> where([c], not is_nil(c.start_date))
|> where([c], not is_nil(c.end_date))
|> Filter.filter(opts[:filter], __MODULE__)
|> Repo.all()
end
def challenge_prefilter(challenges) do
Enum.filter(challenges, fn challenge ->
!is_nil(challenge.start_date) and !is_nil(challenge.end_date)
end)
end
def active_challenges(all_challenges) do
Enum.filter(all_challenges, fn challenge ->
challenge.status == "published" and
(challenge.sub_status == "open" or challenge.sub_status == "closed")
end)
end
def archived_challenges(all_challenges) do
Enum.filter(all_challenges, fn challenge ->
challenge.status == "published" and challenge.sub_status == "archived"
end)
end
def draft_challenges(all_challenges) do
Enum.filter(all_challenges, fn challenge ->
challenge.status == "draft"
end)
end
def launched_in_year?(challenge, year) do
challenge.start_date.year == year
end
def ongoing_in_year?(challenge, year) do
challenge.start_date.year < year and
challenge.end_date.year > year
end
def closed_in_year?(challenge, year) do
challenge.end_date.year == year
end
def get_year_range(start_year, end_year) do
start_year = get_start_year(start_year)
end_year = get_end_year(end_year)
Enum.to_list(start_year..end_year)
end
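# Sketch: blank bounds fall back to the earliest challenge start date and
# the current UTC year, e.g.
#
#   get_year_range("2018", "")
#   #=> [2018, 2019, ..., DateTime.utc_now().year]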
def get_start_year(""), do: default_start_year()
def get_start_year(year), do: year_to_integer(year)
def default_start_year, do: Repo.one(select(Challenge, [c], min(c.start_date))).year
def get_end_year(""), do: default_end_year()
def get_end_year(year), do: year_to_integer(year)
def default_end_year, do: DateTime.utc_now().year
defp year_to_integer(year) do
{year, _} = Integer.parse(year)
year
end
def calculate_prize_amount(challenge = %{imported: true}), do: challenge.prize_total || 0
def calculate_prize_amount(challenge), do: (challenge.prize_total || 0) / 1000
def all_challenges(challenges, years) do
challenges = challenge_prefilter(challenges)
data =
years
|> Enum.reduce(%{}, fn year, acc ->
Map.put(
acc,
year,
Enum.count(challenges, fn challenge ->
challenge.start_date.year == year
end)
)
end)
data_obj = %{
datasets: [
%{
data: data
}
]
}
options_obj = []
%{
data: data_obj,
options: options_obj
}
end
def challenges_by_primary_type(challenges, years) do
challenges =
challenges
|> challenge_prefilter()
|> Enum.filter(fn challenge ->
!is_nil(challenge.primary_type)
end)
labels = years
data =
challenges
|> Enum.group_by(fn challenge -> challenge.primary_type end)
colors = ColorStream.hex() |> Enum.take(Enum.count(data))
data =
data
|> Enum.with_index()
|> Enum.reduce([], fn {{primary_type, challenges}, index}, acc ->
grouped_challenges =
Enum.group_by(challenges, fn challenge -> challenge.start_date.year end)
data =
years
|> Enum.map(fn year ->
grouped_challenges = grouped_challenges[year] || []
Enum.count(grouped_challenges)
end)
data = %{
label: primary_type,
data: data,
borderWidth: 1,
backgroundColor: "##{Enum.at(colors, index)}"
}
acc ++ [data]
end)
data_obj = %{
labels: labels,
datasets: data
}
options_obj = [
options: %{
plugins: %{
legend: %{
display: true,
position: "bottom"
}
},
scales: %{
x: %{
stacked: true
},
y: %{
stacked: true
}
}
}
]
%{
data: data_obj,
options: options_obj
}
end
def challenges_hosted_externally(challenges, years) do
challenges = challenge_prefilter(challenges)
colors = ColorStream.hex() |> Enum.take(2)
labels = years
data =
challenges
|> Enum.group_by(fn challenge -> is_nil(challenge.external_url) end)
|> Enum.reduce([], fn {hosted_internally, challenges}, acc ->
grouped_challenges =
Enum.group_by(challenges, fn challenge -> challenge.start_date.year end)
data =
years
|> Enum.map(fn year ->
grouped_challenges = grouped_challenges[year] || []
Enum.count(grouped_challenges)
end)
{label, color_index} =
if hosted_internally, do: {"Hosted on Challenge.gov", 0}, else: {"Hosted externally", 1}
data = %{
label: label,
data: data,
backgroundColor: "##{Enum.at(colors, color_index)}"
}
acc ++ [data]
end)
data_obj = %{
labels: labels,
datasets: data
}
options_obj = [
options: %{
plugins: %{
legend: %{
display: true,
position: "bottom"
}
}
}
]
%{
data: data_obj,
options: options_obj
}
end
def total_cash_prizes(challenges, years) do
challenges = challenge_prefilter(challenges)
data =
years
|> Enum.reduce(%{}, fn year, acc ->
total_prize_amount =
challenges
|> Enum.filter(fn challenge -> challenge.start_date.year == year end)
|> Enum.map(fn challenge ->
calculate_prize_amount(challenge)
end)
|> Enum.sum()
Map.put(acc, year, total_prize_amount)
end)
data_obj = %{
datasets: [
%{
data: data
}
]
}
options_obj = [
options: %{
format: "currency",
plugins: %{
legend: %{
display: false
}
},
scales: %{
y: %{
beginAtZero: true
}
}
}
]
%{
data: data_obj,
options: options_obj
}
end
def challenges_by_legal_authority(challenges, years) do
challenges =
challenges
|> challenge_prefilter()
|> Enum.filter(fn challenge ->
!is_nil(challenge.legal_authority)
end)
colors = ColorStream.hex() |> Enum.take(2)
labels = years
data =
challenges
|> Enum.group_by(fn challenge ->
challenge.legal_authority
|> String.downcase()
|> String.contains?("competes")
end)
|> Enum.reduce([], fn {is_america_competes, challenges}, acc ->
grouped_challenges =
Enum.group_by(challenges, fn challenge -> challenge.start_date.year end)
data =
years
|> Enum.map(fn year ->
grouped_challenges = grouped_challenges[year] || []
Enum.count(grouped_challenges)
end)
{label, color_index} =
if is_america_competes, do: {"America Competes", 0}, else: {"Other", 1}
data = %{
label: label,
data: data,
backgroundColor: "##{Enum.at(colors, color_index)}"
}
acc ++ [data]
end)
data_obj = %{
labels: labels,
datasets: data
}
options_obj = [
options: %{
plugins: %{
legend: %{
display: true,
position: "bottom"
}
}
}
]
%{
data: data_obj,
options: options_obj
}
end
def participating_lead_agencies(challenges, years) do
challenges =
challenges
|> challenge_prefilter()
|> Enum.filter(fn challenge -> !is_nil(challenge.agency_id) end)
labels = years
launched_data =
years
|> Enum.map(fn year ->
challenges
|> Enum.filter(fn challenge -> launched_in_year?(challenge, year) end)
|> Enum.uniq_by(fn challenge -> challenge.agency_id end)
|> Enum.count()
end)
data = [
%{
data: launched_data
}
]
data_obj = %{
labels: labels,
datasets: data
}
options_obj = []
%{
data: data_obj,
options: options_obj
}
end
@impl Stein.Filter
def filter_on_attribute({"agency_id", value}, query) do
where(query, [c], c.agency_id == ^value)
end
def filter_on_attribute({"year_filter", value}, query) do
query
|> maybe_filter_start_year(value)
|> maybe_filter_end_year(value)
end
defp maybe_filter_start_year(query, %{"start_year" => ""}), do: query
defp maybe_filter_start_year(query, %{"target_date" => "start", "start_year" => year}) do
{year, _} = Integer.parse(year)
where(query, [c], fragment("DATE_PART('year', ?)", c.start_date) >= ^year)
end
defp maybe_filter_start_year(query, %{"target_date" => "end", "start_year" => year}) do
{year, _} = Integer.parse(year)
where(query, [c], fragment("DATE_PART('year', ?)", c.end_date) >= ^year)
end
defp maybe_filter_end_year(query, %{"end_year" => ""}), do: query
defp maybe_filter_end_year(query, %{"target_date" => "start", "end_year" => year}) do
{year, _} = Integer.parse(year)
where(query, [c], fragment("DATE_PART('year', ?)", c.start_date) <= ^year)
end
defp maybe_filter_end_year(query, %{"target_date" => "end", "end_year" => year}) do
{year, _} = Integer.parse(year)
where(query, [c], fragment("DATE_PART('year', ?)", c.end_date) <= ^year)
end
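# Usage sketch (filter shape assumed from the clauses above):
#
#   ChallengeGov.Analytics.get_challenges(
#     filter: %{
#       "agency_id" => agency_id,
#       "year_filter" => %{"target_date" => "start", "start_year" => "2019", "end_year" => ""}
#     }
#   )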
end
# source file: lib/challenge_gov/analytics.ex
defmodule Grizzly.ZWave.Commands.NodeAddDSKReport do
@moduledoc """
The Z-Wave Command `NODE_ADD_DSK_REPORT`
This report is used by the including controller to ask for the DSK
for the device that is being included.
## Params
- `:seq_number` - sequence number for the command (required)
- `:input_dsk_length` - the number of user-input bytes required in the
`:dsk` field for authentication (optional, default: `0`)
- `:dsk` - the DSK for the device see `Grizzly.ZWave.DSK` for more
information (required)
The `:input_dsk_length` field defaults to 0 when not provided. That means
the device does not require any user input to the DSK Set command to
authenticate the device. This is the normal case when `:s2_unauthenticated`
or client-side authentication has been given.
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{DSK, Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.NetworkManagementInclusion
@type param ::
{:seq_number, Grizzly.seq_number()}
| {:input_dsk_length, 0..16}
| {:dsk, DSK.t()}
@impl true
@spec new([param]) :: {:ok, Command.t()}
def new(params) do
:ok = validate_seq_number(params)
:ok = validate_dsk(params)
params = validate_and_ensure_input_dsk_length(params)
command = %Command{
name: :node_add_dsk_report,
command_byte: 0x13,
command_class: NetworkManagementInclusion,
params: params,
impl: __MODULE__
}
{:ok, command}
end
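# Usage sketch. Note that encode_params/1 runs the :dsk param through
# DSK.parse/1, so in practice a DSK string encodes cleanly despite the
# DSK.t() typespec above (the DSK below is a placeholder value):
#
#   {:ok, command} =
#     NodeAddDSKReport.new(
#       seq_number: 1,
#       dsk: "50285-18819-09924-30691-15973-33711-04005-03623"
#     )
#   # :input_dsk_length was omitted, so new/1 fills it in as 0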
@impl true
def encode_params(command) do
seq_number = Command.param!(command, :seq_number)
input_dsk_length = Command.param!(command, :input_dsk_length)
dsk = Command.param!(command, :dsk)
{:ok, dsk} = DSK.parse(dsk)
<<seq_number, input_dsk_length>> <> dsk.raw
end
@impl true
@spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
def decode_params(<<seq_number, _::size(4), input_dsk_length::size(4), dsk_bin::binary>>) do
{:ok, [seq_number: seq_number, input_dsk_length: input_dsk_length, dsk: DSK.new(dsk_bin)]}
end
defp validate_seq_number(params) do
case Keyword.get(params, :seq_number) do
nil ->
raise ArgumentError, """
When building the Z-Wave command #{inspect(__MODULE__)} the param :seq_number is
required.
Please ensure you are passing a :seq_number option to the command
"""
seq_number when seq_number >= 0 and seq_number <= 255 ->
:ok
seq_number ->
raise ArgumentError, """
When building the Z-Wave command #{inspect(__MODULE__)} the param :seq_number should
be an integer between 0 and 255 (0xFF) inclusive.
It looks like you passed: #{inspect(seq_number)}
"""
end
end
defp validate_dsk(params) do
case Keyword.get(params, :dsk) do
nil ->
raise ArgumentError, """
When building the Z-Wave command #{inspect(__MODULE__)} the param :dsk is
required.
Please ensure you are passing a :dsk option to the command
"""
_dsk ->
:ok
end
end
def validate_and_ensure_input_dsk_length(params) do
case Keyword.get(params, :input_dsk_length) do
nil ->
Keyword.put(params, :input_dsk_length, 0)
length when length >= 0 and length <= 16 ->
params
length ->
raise ArgumentError, """
When building the Z-Wave command #{inspect(__MODULE__)} the param :input_dsk_length should
be an integer between 0 and 16 inclusive.
It looks like you passed: #{inspect(length)}
"""
end
end
end
# source file: lib/grizzly/zwave/commands/node_add_dsk_report.ex
defmodule Resourceful.Type.Ecto do
@moduledoc """
Creates a `Resourceful.Type` from an `Ecto.Schema` module. The use case
is that internal data will be represented by the schema and client-facing data
will be represented by the resource definition. Additionally, field names may
be mapped differently to the client, such as camel case values. This can be
done individually or with a single function as an option.
Since `Resourceful.Type` instances use the same type system as Ecto, this
is a relatively straightforward conversion.
"""
alias Resourceful.{Type, Util}
alias Resourceful.Type.Attribute
@doc """
Returns a `Resourceful.Type.Attribute` based on a field from an `Ecto.Schema`
module.
"""
@spec attribute(module(), atom(), keyword()) :: %Attribute{}
def attribute(schema, field_name, opts \\ []) do
Attribute.new(
transform_name(field_name, Keyword.get(opts, :transform_names)),
schema.__schema__(:type, field_name),
Keyword.put(opts, :map_to, field_name)
)
end
@doc """
Returns a `Resourceful.Type` from an `Ecto.Schema` module by inferring
values from the schema.
## Options
For most options, a list of schema field names (atoms) will be passed in
specifying the type's configuration for those fields. In these cases a value
of `true` or `:all` will result in all fields being used. For example if you
wanted to be able to query all fields, you would pass `[query: :all]`.
* `:except` - Schema fields to be excluded from the type.
* `:filter` - Schema fields allowed to be filtered.
* `:only` - Schema fields to be included in the type.
* `:query` - Schema fields allowed to be queried (sorted and filtered).
* `:sort` - Schema fields allowed to be sorted.
* `:transform_names` - A single argument function that takes the field name
(an atom) and transforms it into either another atom or a string. A type
of case conversion is the most likely use case.
Additionally, any options not mentioned above will be passed directly to
`Resourceful.Type.new/2`.
"""
@spec type_with_schema(module(), keyword()) :: %Type{}
def type_with_schema(schema, opts \\ []) do
Type.new(
Keyword.get(opts, :name, schema.__schema__(:source)),
type_opts(schema, opts)
)
end
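# Usage sketch (MyApp.Song is a hypothetical Ecto schema):
#
#   Resourceful.Type.Ecto.type_with_schema(MyApp.Song,
#     except: [:inserted_at, :updated_at],
#     query: :all,
#     transform_names: fn field -> field |> to_string() |> Macro.camelize() end
#   )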
defp attr_opts(attr_name, opts) do
Keyword.merge(
opts,
Enum.map([:filter, :query, :sort], &{&1, in_opt_list?(opts, &1, attr_name)})
)
end
defp expand_all_opt({opt, val}, fields)
when opt in [:filter, :query, :sort] and val in [:all, true],
do: {opt, fields}
defp expand_all_opt(opt, _), do: opt
defp expand_all_opts(fields, opts), do: Enum.map(opts, &expand_all_opt(&1, fields))
defp in_opt_list?(opts, key, attr) do
opts
|> Keyword.get(key, [])
|> Enum.member?(attr)
end
defp type_opts(schema, opts) do
fields = Util.except_or_only!(opts, schema.__schema__(:fields))
opts = expand_all_opts(fields, opts)
Keyword.merge(opts,
fields: Enum.map(fields, &attribute(schema, &1, attr_opts(&1, opts))),
meta: %{ecto: %{schema: schema}},
id: Keyword.get(opts, :id, schema.__schema__(:primary_key))
)
end
defp transform_name(field_name, nil), do: to_string(field_name)
defp transform_name(field_name, func) when is_function(func, 1), do: func.(field_name)
end
# source file: lib/resourceful/type/ecto.ex
defmodule Aja do
@moduledoc ~S"""
Convenience macros to work with Aja's data structures.
Use `import Aja` to import everything, or import only the macros you need.
"""
@doc ~S"""
A sigil to build [IO data](https://hexdocs.pm/elixir/IO.html#module-io-data) and avoid string concatenation.
Use `import Aja` to use it, or `import Aja, only: [sigil_i: 2]`.
This sigil provides a faster version of string interpolation which:
- will build a list with all chunks instead of concatenating them as a string
- uses `Aja.IO.to_iodata/1` on interpolated values instead of `to_string/1`, which:
* will keep lists untouched, without any validation or transformation
* will cast anything else using `to_string/1`
Works with both [IO data](https://hexdocs.pm/elixir/IO.html#module-io-data) and
[Chardata](https://hexdocs.pm/elixir/IO.html?#module-chardata).
See their respective documentation for more information.
## Examples
iex> ~i"atom: #{:foo}, charlist: #{'abc'}, number: #{12 + 2.35}\n"
["atom: ", "foo", ", charlist: ", 'abc', ", number: ", "14.35", 10]
iex> ~i"abc#{['def' | "ghi"]}"
["abc", ['def' | "ghi"]]
iex> ~i"<NAME>"
"<NAME>"
IO data can often be used as is without ever generating the corresponding string.
If needed however, IO data can be cast as a string using `IO.iodata_to_binary/1`,
and chardata using `List.to_string/1`. In most cases, both should be the same:
iex> IO.iodata_to_binary(~i"atom: #{:foo}, charlist: #{'abc'}, number: #{12 + 2.35}\n")
"atom: foo, charlist: abc, number: 14.35\n"
iex> List.to_string(~i"abc#{['def' | "ghi"]}")
"abcdefghi"
Those are the exact same values returned by a regular string interpolation, without
the `~i` sigil:
iex> "atom: #{:foo}, charlist: #{'abc'}, number: #{12 + 2.35}\n"
"atom: foo, charlist: abc, number: 14.35\n"
iex> "abc#{['def' | "ghi"]}"
"abcdefghi"
"""
defmacro sigil_i(term, modifiers)
defmacro sigil_i({:<<>>, _, [piece]}, []) when is_binary(piece) do
Macro.unescape_string(piece)
end
defmacro sigil_i({:<<>>, _line, pieces}, []) do
Enum.map(pieces, &sigil_i_piece/1)
end
defp sigil_i_piece({:"::", _, [{{:., _, _}, _, [expr]}, {:binary, _, _}]}) do
quote do
Aja.IO.to_iodata(unquote(expr))
end
end
defp sigil_i_piece(piece) when is_binary(piece) do
case Macro.unescape_string(piece) do
<<char>> -> char
binary -> binary
end
end
@doc ~S"""
Convenience macro to create or pattern match on `Aja.OrdMap`s.
Use `import Aja` to use it, or `import Aja, only: [ord: 1]`.
## Creation examples
iex> ord(%{"一" => 1, "二" => 2, "三" => 3})
ord(%{"一" => 1, "二" => 2, "三" => 3})
iex> ord(%{a: "Ant", b: "Bat", c: "Cat"})
ord(%{a: "Ant", b: "Bat", c: "Cat"})
## Pattern matching examples
iex> ord(%{b: bat}) = ord(%{a: "Ant", b: "Bat", c: "Cat"}); bat
"Bat"
## Replace existing keys examples
iex> ordered = ord(%{a: "Ant", b: "Bat", c: "Cat"})
iex> ord(%{ordered | b: "Buffalo"})
ord(%{a: "Ant", b: "Buffalo", c: "Cat"})
iex> ord(%{ordered | z: "Zebra"})
** (KeyError) key :z not found in: ord(%{a: "Ant", b: "Bat", c: "Cat"})
"""
defmacro ord({:%{}, _context, [{:|, _context2, [ordered, key_values]}]} = call) do
unless Enum.all?(key_values, fn key_value -> match?({_, _}, key_value) end) do
raise_ord_argument_error(call)
end
quote do
Aja.OrdMap.replace_many!(unquote(ordered), unquote(key_values))
end
end
defmacro ord({:%{}, context, key_value_pairs}) do
case __CALLER__.context do
nil ->
Aja.OrdMap.from_list_ast(key_value_pairs, __CALLER__)
:match ->
match_map = to_match_map(key_value_pairs, context)
quote do
%Aja.OrdMap{__ord_map__: unquote(match_map)}
end
:guard ->
raise ArgumentError, "`Aja.ord/1` cannot be used in guards"
end
end
defmacro ord(call) do
raise_ord_argument_error(call)
end
defp raise_ord_argument_error(call) do
raise ArgumentError, ~s"""
Incorrect use of `Aja.ord/1`:
ord(#{Macro.to_string(call)}).
To create a new ordered map:
ord_map = ord(%{b: "Bat", a: "Ant", c: "Cat"})
To pattern-match:
ord(%{a: ant}) = ord_map
To replace an existing key:
ord(%{ord_map | b: "Buffalo"})
"""
end
defp to_match_map(key_value_pairs, context) do
wildcard_pairs =
for {key, value} <- key_value_pairs do
{key,
quote do
[_ | unquote(value)]
end}
end
{:%{}, context, wildcard_pairs}
end
@doc """
Returns the size of an `ord_map`.
It is implemented as a macro so that it can be used in guards.
When used outside of a guard, it will just be replaced by a call to `Aja.OrdMap.size/1`.
When used in guards, it will fail if called on something else than an `Aja.OrdMap`.
It is recommended to verify the type first.
Runs in constant time.
## Examples
iex> import Aja
iex> ord_map = Aja.OrdMap.new(a: 1, b: 2, c: 3)
iex> match?(v when ord_size(v) > 5, ord_map)
false
iex> match?(v when ord_size(v) < 5, ord_map)
true
iex> ord_size(ord_map)
3
"""
defmacro ord_size(ord_map) do
case __CALLER__.context do
nil ->
quote do
Aja.OrdMap.size(unquote(ord_map))
end
:match ->
raise ArgumentError, "`Aja.ord_size/1` cannot be used in match"
:guard ->
quote do
# TODO simplify when stop supporting Elixir 1.10
:erlang.map_get(:__ord_map__, unquote(ord_map)) |> :erlang.map_size()
end
end
end
@doc """
Convenience macro to create or pattern match on `Aja.Vector`s.
## Examples
iex> import Aja
iex> vec([1, 2, 3])
vec([1, 2, 3])
iex> vec(first ||| last) = Aja.Vector.new(0..99_999); {first, last}
{0, 99999}
iex> vec([1, 2, var, _, _, _]) = Aja.Vector.new(1..6); var
3
iex> vec([_, _, _]) = Aja.Vector.new(1..6)
** (MatchError) no match of right hand side value: vec([1, 2, 3, 4, 5, 6])
It also supports ranges with **constant** values:
iex> vec(0..4) = Aja.Vector.new(0..4)
vec([0, 1, 2, 3, 4])
Variable lists or dynamic ranges cannot be passed:
vec(my_list) # invalid
vec(1..n) # invalid
## Explanation
The `vec/1` macro generates the AST at compile time instead of building the vector
at runtime. This can speed up the instantiation of vectors of known size.
"""
defmacro vec(list) when is_list(list) do
ast_from_list(list, __CALLER__)
end
defmacro vec({:.., _, [first, last]} = call) do
case Enum.map([first, last], &Macro.expand(&1, __CALLER__)) do
[first, last] when is_integer(first) and is_integer(last) ->
first..last
|> Enum.to_list()
|> ast_from_list(__CALLER__)
_ ->
raise ArgumentError, ~s"""
Incorrect use of `Aja.vec/1`:
vec(#{Macro.to_string(call)}).
The `vec(a..b)` syntax can only be used with constants:
vec(1..100)
"""
end
end
# TODO remove when dropping support for Elixir < 1.12
stepped_range_available? = Version.compare(System.version(), "1.12.0-rc.0") != :lt
if stepped_range_available? do
defmacro vec({:"..//", _, [first, last, step]} = call) do
case Enum.map([first, last, step], &Macro.expand(&1, __CALLER__)) do
[first, last, step] when is_integer(first) and is_integer(last) and is_integer(step) ->
Range.new(first, last, step)
|> Enum.to_list()
|> ast_from_list(__CALLER__)
_ ->
raise ArgumentError, ~s"""
Incorrect use of `Aja.vec/1`:
vec(#{Macro.to_string(call)}).
The `vec(a..b//c)` syntax can only be used with constants:
vec(1..100//5)
"""
end
end
end
defmacro vec({:|||, _, [first, last]}) do
case __CALLER__.context do
:match ->
quote do
%Aja.Vector{__vector__: unquote(Aja.Vector.Raw.from_first_last_ast(first, last))}
end
_ ->
raise ArgumentError, "The `vec(x ||| y)` syntax can only be used in matches"
end
end
defmacro vec({:_, _, _}) do
quote do
%Aja.Vector{__vector__: _}
end
end
defmacro vec(call) do
raise ArgumentError, ~s"""
Incorrect use of `Aja.vec/1`:
vec(#{Macro.to_string(call)}).
To create a new vector from a fixed-sized list:
vector = vec([:foo, 4, a + b])
To create a new vector from a constant range:
vector = vec(1..100)
! Variables cannot be used as lists or inside the range declaration !
vec(my_list) # invalid
vec(1..n) # invalid
To pattern-match:
vec([1, 2, x, _]) = vector
vec([]) = empty_vector
vec(_) = vector
vec(first ||| last) = vector
"""
end
defp ast_from_list([head | tail], %{context: nil}) do
if Macro.quoted_literal?(head) do
do_ast_from_list([head | tail])
else
quote do
first = unquote(head)
unquote(
do_ast_from_list([
quote do
first
end
| tail
])
)
end
end
end
defp ast_from_list(list, _caller) do
do_ast_from_list(list)
end
defp do_ast_from_list(list) do
internal_ast = Aja.Vector.Raw.from_list_ast(list)
quote do
%Aja.Vector{__vector__: unquote(internal_ast)}
end
end
@doc """
Returns the size of a `vector`.
It is implemented as a macro so that it can be used in guards.
When used outside of a guard, it will just be replaced by a call to `Aja.Vector.size/1`.
When used in guards, it will fail if called on something else than an `Aja.Vector`.
It is recommended to verify the type first.
Runs in constant time.
## Examples
iex> import Aja
iex> match?(v when vec_size(v) > 20, Aja.Vector.new(1..10))
false
iex> match?(v when vec_size(v) < 5, Aja.Vector.new([1, 2, 3]))
true
iex> vec_size(Aja.Vector.new([1, 2, 3]))
3
"""
defmacro vec_size(vector) do
case __CALLER__.context do
nil ->
quote do
Aja.Vector.size(unquote(vector))
end
:match ->
raise ArgumentError, "`Aja.vec_size/1` cannot be used in match"
:guard ->
quote do
:erlang.element(
1,
# TODO simplify when stop supporting Elixir 1.10
:erlang.map_get(:__vector__, unquote(vector))
)
end
end
end
plus_enabled? = Version.compare(System.version(), "1.11.0") != :lt
if plus_enabled? do
@doc """
Convenience operator to concatenate an enumerable `right` to a vector `left`.
`left` has to be an `Aja.Vector`, `right` can be any `Enumerable`.
It is just an alias for `Aja.Vector.concat/2`.
Only available on Elixir versions >= 1.11.
## Examples
iex> import Aja
iex> vec(5..1) +++ vec([:boom, nil])
vec([5, 4, 3, 2, 1, :boom, nil])
iex> vec(5..1) +++ 0..3
vec([5, 4, 3, 2, 1, 0, 1, 2, 3])
"""
# TODO remove hack to support 1.10
defdelegate unquote(if(plus_enabled?, do: String.to_atom("+++"), else: :++))(left, right),
to: Aja.Vector,
as: :concat
end
end
# source file: lib/aja.ex
defmodule MarsRover.Rover do
@moduledoc """
A rover implementation. It can be sent to planets.
"""
use GenServer
defstruct [:planet, :coordinates, :direction]
# Initialization
@spec start_link(atom() | pid(), integer(), integer(), atom()) :: :ignore | {:error, any()} | {:ok, pid()}
def start_link(planet, initial_x, initial_y, direction \\ :n) do
case MarsRover.Planet.has_obstacle?(planet, initial_x, initial_y) do
false ->
state = %__MODULE__{planet: planet, coordinates: {initial_x, initial_y}, direction: direction}
GenServer.start_link(__MODULE__, state)
_ -> {:error, :obstacle}
end
end
# API
@spec commands(atom() | pid(), String.t()) :: any()
def commands(rover, commands) do
GenServer.call(rover, {:commands, commands |> String.graphemes()})
end
@spec position(atom() | pid()) :: any()
def position(rover), do: GenServer.call(rover, :position)
# Callbacks
@spec init(any()) :: {:ok, any()}
def init(state), do: {:ok, state}
def handle_call({:commands, commands}, _from, state) do
case apply_commands(state, commands) do
{:error, info, new_state} -> {:reply, {:error, info}, new_state}
new_state -> {:reply, {:ok, new_state.coordinates}, new_state}
end
end
def handle_call(:position, _from, state), do: {:reply, state.coordinates, state}
# Helpers
@spec apply_commands(any(), list(String.t())) :: any()
def apply_commands(state, []), do: state
def apply_commands(state, [command | tail]) do
{x, y, d} = next_coordinates(state, command)
{x, y} = MarsRover.Planet.normalize_coordinates(state.planet, x, y)
case MarsRover.Planet.has_obstacle?(state.planet, x, y) do
false -> %{state | coordinates: {x, y}, direction: d} |> apply_commands(tail)
_ -> {:error, {:obstacle, {x, y}}, state}
end
end
@spec next_coordinates(any(), String.t()) :: any()
def next_coordinates(state, "f"), do: forward(state.coordinates, state.direction)
def next_coordinates(state, "b"), do: backward(state.coordinates, state.direction)
def next_coordinates(state, "l"), do: turn_left(state.coordinates, state.direction)
def next_coordinates(state, "r"), do: turn_right(state.coordinates, state.direction)
@spec turn_left({integer(), integer()}, :n | :s | :e | :w) :: {integer(), integer(), :e | :n | :s | :w}
def turn_left({x, y}, :n), do: {x, y, :w}
def turn_left({x, y}, :s), do: {x, y, :e}
def turn_left({x, y}, :e), do: {x, y, :n}
def turn_left({x, y}, :w), do: {x, y, :s}
@spec turn_right({integer(), integer()}, :n | :s | :e | :w) :: {integer(), integer(), :e | :n | :s | :w}
def turn_right({x, y}, :n), do: {x, y, :e}
def turn_right({x, y}, :s), do: {x, y, :w}
def turn_right({x, y}, :e), do: {x, y, :s}
def turn_right({x, y}, :w), do: {x, y, :n}
@spec forward({integer(), integer()}, atom()) :: {integer(), integer(), :n | :s | :e | :w}
def forward({x, y}, :n), do: {x, y - 1, :n}
def forward({x, y}, :s), do: {x, y + 1, :s}
def forward({x, y}, :e), do: {x + 1, y, :e}
def forward({x, y}, :w), do: {x - 1, y, :w}
@spec backward({integer(), integer()}, atom()) :: {integer(), integer(), :n | :s | :e | :w}
def backward({x, y}, :n), do: {x, y + 1, :n}
def backward({x, y}, :s), do: {x, y - 1, :s}
def backward({x, y}, :e), do: {x - 1, y, :e}
def backward({x, y}, :w), do: {x + 1, y, :w}
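# Usage sketch (assumes a MarsRover.Planet process registered as :mars):
#
#   {:ok, rover} = MarsRover.Rover.start_link(:mars, 0, 0, :n)
#   MarsRover.Rover.commands(rover, "ffrb")
#   #=> {:ok, {x, y}} on success, or {:error, {:obstacle, {x, y}}}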
end
# source file: lib/rover.ex
defmodule Aoc2018.Day01 do
@doc """
--- Day 1: Chronal Calibration ---
"We've detected some temporal anomalies," one of Santa's Elves at the Temporal
Anomaly Research and Detection Instrument Station tells you. She sounded
pretty worried when she called you down here. "At 500-year intervals into the
past, someone has been changing Santa's history!"
"The good news is that the changes won't propagate to our time stream for
another 25 days, and we have a device" - she attaches something to your wrist
- "that will let you fix the changes with no such propagation delay. It's
configured to send you 500 years further into the past every few days; that
was the best we could do on such short notice."
"The bad news is that we are detecting roughly fifty anomalies throughout
time; the device will indicate fixed anomalies with stars. The other bad news
is that we only have one device and you're the best person for the job! Good
lu--" She taps a button on the device and you suddenly feel like you're
falling. To save Christmas, you need to get all fifty stars by December 25th.
Collect stars by solving puzzles. Two puzzles will be made available on each
day in the advent calendar; the second puzzle is unlocked when you complete
the first. Each puzzle grants one star. Good luck!
After feeling like you've been falling for a few minutes, you look at the
device's tiny screen. "Error: Device must be calibrated before first use.
Frequency drift detected. Cannot maintain destination lock." Below the
message, the device shows a sequence of changes in frequency (your puzzle
input). A value like +6 means the current frequency increases by 6; a value
like -3 means the current frequency decreases by 3.
For example, if the device displays frequency changes of +1, -2, +3, +1, then
starting from a frequency of zero, the following changes would occur:
Current frequency 0, change of +1; resulting frequency 1.
Current frequency 1, change of -2; resulting frequency -1.
Current frequency -1, change of +3; resulting frequency 2.
Current frequency 2, change of +1; resulting frequency 3.
In this example, the resulting frequency is 3.
Here are other example situations:
+1, +1, +1 results in 3
+1, +1, -2 results in 0
-1, -2, -3 results in -6
Starting with a frequency of zero, what is the resulting frequency after all
of the changes in frequency have been applied?
iex> calculate_frequency([+1, -2, +3, +1])
3
iex> calculate_frequency([+1, +1, +1])
3
iex> calculate_frequency([+1, +1, -2])
0
iex> calculate_frequency([-1, -2, -3])
-6
"""
def calculate_frequency(freqs) do
Enum.sum(freqs)
end
@doc """
You notice that the device repeats the same frequency change list over and
over. To calibrate the device, you need to find the first frequency it reaches
twice.
For example, using the same list of changes above, the device would loop as
follows:
Current frequency 0, change of +1; resulting frequency 1.
Current frequency 1, change of -2; resulting frequency -1.
Current frequency -1, change of +3; resulting frequency 2.
Current frequency 2, change of +1; resulting frequency 3.
(At this point, the device continues from the start of the list.)
Current frequency 3, change of +1; resulting frequency 4.
Current frequency 4, change of -2; resulting frequency 2, which has already
been seen.
In this example, the first frequency reached twice is 2. Note that your device
might need to repeat its list of frequency changes many times before a
duplicate frequency is found, and that duplicates might be found while in the
middle of processing the list.
Here are other examples:
+1, -1 first reaches 0 twice.
+3, +3, +4, -2, -4 first reaches 10 twice.
-6, +3, +8, +5, -6 first reaches 5 twice.
+7, +7, -2, -7, -4 first reaches 14 twice.
What is the first frequency your device reaches twice?
iex> first_duplicate([+1, -1])
0
iex> first_duplicate([+3, +3, +4, -2, -4])
10
iex> first_duplicate([-6, +3, +8, +5, -6])
5
iex> first_duplicate([+7, +7, -2, -7, -4])
14
"""
def first_duplicate(freqs) do
freqs
# cycle the change list forever; Enum.reduce_while/3 below halts at the
# first sum that has already been seen
|> Stream.cycle()
|> Enum.reduce_while({0, MapSet.new([0])}, fn
f, {sum, seen} ->
new_sum = f + sum
if MapSet.member?(seen, new_sum) do
{:halt, new_sum}
else
{:cont, {new_sum, MapSet.put(seen, new_sum)}}
end
end)
end
end
# source file: 2018_elixir/aoc_2018/lib/aoc_2018/day_01.ex
defmodule Phoenix.LiveView.JS do
@moduledoc ~S'''
Provides commands for executing JavaScript utility operations on the client.
JS commands support a variety of utility operations for common client-side
needs, such as adding or removing CSS classes, setting or removing tag attributes,
showing or hiding content, and transitioning in and out with animations.
While these operations can be accomplished via client-side hooks,
JS commands are DOM-patch aware, so operations applied
by the JS APIs will stick to elements across patches from the server.
In addition to purely client-side utilities, the JS commands include a
rich `push` API, for extending the default `phx-` binding pushes with
options to customize targets, loading states, and additional payload values.
## Client Utility Commands
The following utilities are included:
* `add_class` - Add classes to elements, with optional transitions
* `remove_class` - Remove classes from elements, with optional transitions
* `set_attribute` - Set an attribute on elements
* `remove_attribute` - Remove an attribute from elements
* `show` - Show elements, with optional transitions
* `hide` - Hide elements, with optional transitions
* `toggle` - Shows or hides elements based on visibility, with optional transitions
* `transition` - Apply a temporary transition to elements for animations
* `dispatch` - Dispatch a DOM event to elements
For example, the following modal component can be shown or hidden on the
client without a trip to the server:
alias Phoenix.LiveView.JS
def hide_modal(js \\ %JS{}) do
js
|> JS.hide(transition: "fade-out", to: "#modal")
|> JS.hide(transition: "fade-out-scale", to: "#modal-content")
end
def modal(assigns) do
~H"""
<div id="modal" class="phx-modal" phx-remove={hide_modal()}>
<div
id="modal-content"
class="phx-modal-content"
phx-click-away={hide_modal()}
phx-window-keydown={hide_modal()}
phx-key="escape"
>
<button class="phx-modal-close" phx-click={hide_modal()}>✖</button>
<p><%= @text %></p>
</div>
</div>
"""
end
## Enhanced Push Events
The `push/1` command allows you to extend the built-in pushed event handling
when a `phx-` event is pushed to the server. For example, you may wish to
target a specific component, specify additional payload values to include
with the event, apply loading states to external elements, etc. For example,
given this basic `phx-click` event:
<div phx-click="inc">+</div>
Imagine you need to target your current component, and apply a loading state
to the parent container while the client awaits the server acknowledgement:
alias Phoenix.LiveView.JS
<div phx-click={JS.push("inc", loading: ".thermo", target: @myself)}>+</div>
Push commands also compose with all other utilities. For example,
to add a class when pushing:
<div phx-click={
JS.push("inc", loading: ".thermo", target: @myself)
|> JS.add_class(".warmer", to: ".thermo")
}>+</div>
## Using dispatch with window.addEventListener
`dispatch/1` can be used to dispatch JavaScript events to elements.
For example, you can use `JS.dispatch("click", to: "#foo")`, to
dispatch a click event to an element.
This also means you can augment your elements with custom events,
by using JavaScript's `window.addEventListener` and then invoke
them with `dispatch/1`. For example, imagine you want to provide
a copy-to-clipboard functionality in your application. You can
add a custom event for it:
window.addEventListener("my_app:clipcopy", (event) => {
if ("clipboard" in navigator) {
const text = event.target.textContent;
navigator.clipboard.writeText(text);
} else {
alert("Sorry, your browser does not support clipboard copy.");
}
});
Now you can have a button like this:
<button phx-click={JS.dispatch("my_app:clipcopy", to: "#element-with-text-to-copy")}>
Copy content
</button>
The combination of `dispatch/1` with `window.addEventListener` is
a powerful mechanism to increase the amount of actions you can trigger
client-side from your LiveView code.
'''
alias Phoenix.LiveView.JS
defstruct ops: []
@default_transition_time 200
defimpl Phoenix.HTML.Safe, for: Phoenix.LiveView.JS do
def to_iodata(%Phoenix.LiveView.JS{} = js) do
Phoenix.HTML.Engine.html_escape(Phoenix.json_library().encode!(js.ops))
end
end
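# When rendered into a phx-* binding, the struct serializes its op list
# as (HTML-escaped) JSON; for example, JS.push("inc") renders roughly as
# [["push",{"event":"inc"}]].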
@doc """
Pushes an event to the server.
* `event` - The string event name to push.
## Options
* `:target` - The selector or component ID to push to
* `:loading` - The selector to apply the phx loading classes to
* `:page_loading` - Boolean to trigger the phx:page-loading-start and
phx:page-loading-stop events for this push. Defaults to `false`
* `:value` - The map of values to send to the server
## Examples
<button phx-click={JS.push("clicked")}>click me!</button>
<button phx-click={JS.push("clicked", value: %{id: @id})}>click me!</button>
<button phx-click={JS.push("clicked", page_loading: true)}>click me!</button>
"""
def push(event) when is_binary(event) do
push(%JS{}, event, [])
end
@doc "See `push/1`."
def push(event, opts) when is_binary(event) and is_list(opts) do
push(%JS{}, event, opts)
end
def push(%JS{} = js, event) when is_binary(event) do
push(js, event, [])
end
@doc "See `push/1`."
def push(%JS{} = js, event, opts) when is_binary(event) and is_list(opts) do
opts =
opts
|> validate_keys(:push, [:target, :loading, :page_loading, :value])
|> put_target()
|> put_value()
put_op(js, "push", Enum.into(opts, %{event: event}))
end
@doc """
Dispatches an event to the DOM.
* `event` - The string event name to dispatch.
## Options
* `:to` - The optional DOM selector to dispatch the event to.
Defaults to the interacted element.
* `:detail` - The optional detail map to dispatch along
with the client event. The details will be available in the
`event.detail` attribute for event listeners.
## Examples
window.addEventListener("click", e => console.log("clicked!", e.detail))
<button phx-click={JS.dispatch("click", to: ".nav")}>Click me!</button>
"""
def dispatch(js \\ %JS{}, event)
def dispatch(%JS{} = js, event), do: dispatch(js, event, [])
def dispatch(event, opts), do: dispatch(%JS{}, event, opts)
@doc "See `dispatch/2`."
def dispatch(%JS{} = js, event, opts) do
opts = validate_keys(opts, :dispatch, [:to, :detail])
args = %{event: event, to: opts[:to]}
args =
case Keyword.fetch(opts, :detail) do
{:ok, detail} -> Map.put(args, :detail, detail)
:error -> args
end
put_op(js, "dispatch", args)
end
@doc """
Toggles elements.
## Options
* `:to` - The optional DOM selector to toggle.
Defaults to the interacted element.
* `:in` - The string of classes to apply when toggling in, or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
* `:out` - The string of classes to apply when toggling out, or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-100", "opacity-0"}`
* `:time` - The time to apply the transition `:in` and `:out` classes.
Defaults to #{@default_transition_time}.
* `:display` - The optional display value to set when toggling in. Defaults to `"block"`.
When the toggle is complete on the client, a `phx:show-start` or `phx:hide-start`, and
`phx:show-end` or `phx:hide-end` event will be dispatched to the toggled elements.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.toggle(to: "#item")}>
toggle item!
</button>
<button phx-click={JS.toggle(to: "#item", in: "fade-in-scale", out: "fade-out-scale")}>
toggle fancy!
</button>
"""
def toggle(opts \\ [])
def toggle(%JS{} = js), do: toggle(js, [])
def toggle(opts) when is_list(opts), do: toggle(%JS{}, opts)
@doc "See `toggle/2`."
def toggle(js, opts) when is_list(opts) do
opts = validate_keys(opts, :toggle, [:to, :in, :out, :display, :time])
in_classes = transition_class_names(opts[:in])
out_classes = transition_class_names(opts[:out])
time = opts[:time] || @default_transition_time
put_op(js, "toggle", %{
to: opts[:to],
display: opts[:display],
ins: in_classes,
outs: out_classes,
time: time
})
end
@doc """
Shows elements.
## Options
* `:to` - The optional DOM selector to show.
Defaults to the interacted element.
* `:transition` - The string of classes to apply before showing or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
* `:time` - The time to apply the transition from `:transition`.
Defaults to #{@default_transition_time}.
* `:display` - The optional display value to set when showing. Defaults to `"block"`.
When the show is complete on the client, a `phx:show-start` and `phx:show-end` event
will be dispatched to the shown elements.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.show(to: "#item")}>
show!
</button>
<button phx-click={JS.show(to: "#item", transition: "fade-in-scale")}>
show fancy!
</button>
"""
def show(opts \\ [])
def show(%JS{} = js), do: show(js, [])
def show(opts) when is_list(opts), do: show(%JS{}, opts)
@doc "See `show/2`."
def show(js, opts) when is_list(opts) do
opts = validate_keys(opts, :show, [:to, :transition, :display, :time])
transition = transition_class_names(opts[:transition])
time = opts[:time] || @default_transition_time
put_op(js, "show", %{
to: opts[:to],
display: opts[:display],
transition: transition,
time: time
})
end
@doc """
Hides elements.
## Options
* `:to` - The optional DOM selector to hide.
Defaults to the interacted element.
* `:transition` - The string of classes to apply before hiding or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
* `:time` - The time to apply the transition from `:transition`.
Defaults to #{@default_transition_time}.
When the hide is complete on the client, a `phx:hide-start` and `phx:hide-end`
event will be dispatched to the hidden elements.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.hide(to: "#item")}>
hide!
</button>
<button phx-click={JS.hide(to: "#item", transition: "fade-out-scale")}>
hide fancy!
</button>
"""
def hide(opts \\ [])
def hide(%JS{} = js), do: hide(js, [])
def hide(opts) when is_list(opts), do: hide(%JS{}, opts)
@doc "See `hide/2`."
def hide(js, opts) when is_list(opts) do
opts = validate_keys(opts, :hide, [:to, :transition, :time])
transition = transition_class_names(opts[:transition])
time = opts[:time] || @default_transition_time
put_op(js, "hide", %{
to: opts[:to],
transition: transition,
time: time
})
end
@doc """
Adds classes to elements.
* `names` - The string of classes to add.
## Options
* `:to` - The optional DOM selector to add classes to.
Defaults to the interacted element.
* `:transition` - The string of classes to apply before adding classes or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
* `:time` - The time to apply the transition from `:transition`.
Defaults to #{@default_transition_time}.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.add_class("highlight underline", to: "#item")}>
highlight!
</button>
"""
def add_class(names) when is_binary(names), do: add_class(%JS{}, names, [])
@doc "See `add_class/1`."
def add_class(%JS{} = js, names) when is_binary(names) do
add_class(js, names, [])
end
def add_class(names, opts) when is_binary(names) and is_list(opts) do
add_class(%JS{}, names, opts)
end
@doc "See `add_class/1`."
def add_class(%JS{} = js, names, opts) when is_binary(names) and is_list(opts) do
opts = validate_keys(opts, :add_class, [:to, :transition, :time])
time = opts[:time] || @default_transition_time
put_op(js, "add_class", %{
to: opts[:to],
names: class_names(names),
transition: transition_class_names(opts[:transition]),
time: time
})
end
@doc """
Removes classes from elements.
* `names` - The string of classes to remove.
## Options
* `:to` - The optional DOM selector to remove classes from.
Defaults to the interacted element.
* `:transition` - The string of classes to apply before removing classes or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
* `:time` - The time to apply the transition from `:transition`.
Defaults to #{@default_transition_time}.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.remove_class("highlight underline", to: "#item")}>
remove highlight!
</button>
"""
def remove_class(names) when is_binary(names), do: remove_class(%JS{}, names, [])
@doc "See `remove_class/1`."
def remove_class(%JS{} = js, names) when is_binary(names) do
remove_class(js, names, [])
end
def remove_class(names, opts) when is_binary(names) and is_list(opts) do
remove_class(%JS{}, names, opts)
end
@doc "See `remove_class/1`."
def remove_class(%JS{} = js, names, opts) when is_binary(names) and is_list(opts) do
opts = validate_keys(opts, :remove_class, [:to, :transition, :time])
time = opts[:time] || @default_transition_time
put_op(js, "remove_class", %{
to: opts[:to],
names: class_names(names),
transition: transition_class_names(opts[:transition]),
time: time
})
end
@doc """
Transitions elements.
* `transition` - The string of transition classes to apply, or
a 3-tuple containing the transition class, the class to apply
to start the transition, and the ending transition class, such as:
`{"ease-out duration-300", "opacity-0", "opacity-100"}`
Transitions are useful for temporarily adding an animation class
to element(s), such as for highlighting content changes.
## Options
* `:to` - The optional DOM selector to apply transitions to.
Defaults to the interacted element.
* `:time` - The time to apply the transition from `:transition`.
Defaults to #{@default_transition_time}.
## Examples
<div id="item">My Item</div>
<button phx-click={JS.transition("shake", to: "#item")}>Shake!</button>
"""
def transition(transition) when is_binary(transition) or is_tuple(transition) do
transition(%JS{}, transition, [])
end
@doc "See `transition/1`."
def transition(transition, opts)
when (is_binary(transition) or is_tuple(transition)) and is_list(opts) do
transition(%JS{}, transition, opts)
end
def transition(%JS{} = js, transition) when is_binary(transition) or is_tuple(transition) do
transition(js, transition, [])
end
@doc "See `transition/1`."
def transition(%JS{} = js, transition, opts)
when (is_binary(transition) or is_tuple(transition)) and is_list(opts) do
opts = validate_keys(opts, :transition, [:to, :time])
time = opts[:time] || @default_transition_time
put_op(js, "transition", %{
time: time,
to: opts[:to],
transition: transition_class_names(transition)
})
end
@doc """
Sets an attribute on elements.
Accepts a tuple containing the string attribute name/value pair.
## Options
* `:to` - The optional DOM selector to add attributes to.
Defaults to the interacted element.
## Examples
<button phx-click={JS.set_attribute({"aria-expanded", "true"}, to: "#dropdown")}>
show
</button>
"""
def set_attribute({attr, val}), do: set_attribute(%JS{}, {attr, val}, [])
@doc "See `set_attribute/1`."
def set_attribute({attr, val}, opts) when is_list(opts),
do: set_attribute(%JS{}, {attr, val}, opts)
def set_attribute(%JS{} = js, {attr, val}), do: set_attribute(js, {attr, val}, [])
@doc "See `set_attribute/1`."
def set_attribute(%JS{} = js, {attr, val}, opts) when is_list(opts) do
opts = validate_keys(opts, :set_attribute, [:to])
put_op(js, "set_attr", %{to: opts[:to], attr: [attr, val]})
end
@doc """
Removes an attribute from elements.
* `attr` - The string attribute name to remove.
## Options
* `:to` - The optional DOM selector to remove attributes from.
Defaults to the interacted element.
## Examples
<button phx-click={JS.remove_attribute("aria-expanded", to: "#dropdown")}>
hide
</button>
"""
def remove_attribute(attr), do: remove_attribute(%JS{}, attr, [])
@doc "See `remove_attribute/1`."
def remove_attribute(attr, opts) when is_list(opts),
do: remove_attribute(%JS{}, attr, opts)
def remove_attribute(%JS{} = js, attr), do: remove_attribute(js, attr, [])
@doc "See `remove_attribute/1`."
def remove_attribute(%JS{} = js, attr, opts) when is_list(opts) do
opts = validate_keys(opts, :remove_attribute, [:to])
put_op(js, "remove_attr", %{to: opts[:to], attr: attr})
end
defp put_op(%JS{ops: ops} = js, kind, %{} = args) do
%JS{js | ops: ops ++ [[kind, args]]}
end
defp class_names(nil), do: []
defp class_names(names) do
String.split(names, " ")
end
defp transition_class_names(nil), do: [[], [], []]
defp transition_class_names(transition) when is_binary(transition),
do: [class_names(transition), [], []]
defp transition_class_names({transition, tstart, tend})
when is_binary(tstart) and is_binary(transition) and is_binary(tend) do
[class_names(transition), class_names(tstart), class_names(tend)]
end
defp validate_keys(opts, kind, allowed_keys) do
for key <- Keyword.keys(opts) do
if key not in allowed_keys do
raise ArgumentError, """
invalid option for #{kind}
Expected keys to be one of #{inspect(allowed_keys)}, got: #{inspect(key)}
"""
end
end
opts
end
defp put_value(opts) do
case Keyword.fetch(opts, :value) do
{:ok, val} when is_map(val) -> Keyword.put(opts, :value, val)
{:ok, val} -> raise ArgumentError, "push :value expected to be a map, got: #{inspect(val)}"
:error -> opts
end
end
defp put_target(opts) do
case Keyword.fetch(opts, :target) do
{:ok, %Phoenix.LiveComponent.CID{cid: cid}} -> Keyword.put(opts, :target, cid)
{:ok, selector} -> Keyword.put(opts, :target, selector)
:error -> opts
end
end
end
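# A minimal composition sketch (element ids and classes below are illustrative,
# not part of this module): each function returns the accumulated %JS{} struct,
# so commands chain with |> and execute client-side in order.
alias Phoenix.LiveView.JS

js =
  JS.remove_class("hidden", to: "#detail")
  |> JS.transition({"ease-out duration-300", "opacity-0", "opacity-100"}, to: "#detail")
  |> JS.set_attribute({"aria-expanded", "true"}, to: "#toggle")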
# lib/phoenix_live_view/js.ex
defmodule BlueJet.Query do
@moduledoc """
This module defines some common query functions used when implementing service functions.
This module also defines the functions that a query module should implement in
order to be used with default service functions.
"""
import Ecto.Query
alias Ecto.Query
@callback default() :: Query.t()
@callback get_by(query :: Query.t(), identifiers :: map) :: Query.t()
@callback filter_by(query :: Query.t(), filter :: map) :: Query.t()
@callback search(
query :: Query.t(),
keyword :: String.t(),
locale :: String.t(),
default_locale :: String.t()
) :: Query.t()
@callback preloads(path :: tuple, opts :: map) :: keyword
@spec for_account(Query.t(), String.t() | nil) :: Query.t()
def for_account(query, nil) do
from q in query, where: is_nil(q.account_id)
end
def for_account(query, account_id) do
from q in query, where: q.account_id == ^account_id
end
@spec sort_by(Query.t(), keyword(atom)) :: Query.t()
def sort_by(query, sort) do
from q in query, order_by: ^sort
end
@spec paginate(Query.t(), keyword(integer)) :: Query.t()
def paginate(query, size: size, number: number) do
limit = size
offset = size * (number - 1)
query
|> limit(^limit)
|> offset(^offset)
end
@spec id_only(Query.t()) :: Query.t()
def id_only(query) do
from r in query, select: r.id
end
@spec lock_exclusively(Query.t()) :: Query.t()
def lock_exclusively(query) do
lock(query, "FOR UPDATE")
end
@spec filter_by(Query.t(), map, [atom]) :: Query.t()
def filter_by(query, filter, filterable_fields) do
filter = Map.take(filter, filterable_fields)
Enum.reduce(filter, query, fn({k, v}, acc_query) ->
cond do
is_list(v) ->
from q in acc_query, where: field(q, ^k) in ^v
is_nil(v) ->
from q in acc_query, where: is_nil(field(q, ^k))
true ->
from q in acc_query, where: field(q, ^k) == ^v
end
end)
end
# TODO: change order of param to match spec
@spec search(Query.t(), String.t(), [atom]) :: Query.t()
def search(query, keyword, columns), do: search_default_locale(query, keyword, columns)
@spec search(Query.t(), String.t(), String.t(), String.t(), [atom], [atom]) :: Query.t()
def search(query, nil, _, _, _, _), do: query
def search(query, "", _, _, _, _), do: query
def search(query, keyword, locale, default_locale, columns, _) when is_nil(locale) or (locale == default_locale) do
search_default_locale(query, keyword, columns)
end
def search(query, keyword, locale, _, columns, translatable_columns) do
search_translations(query, keyword, locale, columns, translatable_columns)
end
defp search_default_locale(query, keyword, columns) do
keyword = "%#{keyword}%"
Enum.reduce(columns, query, fn(column, query) ->
from q in query, or_where: ilike(fragment("?::varchar", field(q, ^column)), ^keyword)
end)
end
defp search_translations(query, keyword, locale, columns, translatable_columns) do
keyword = "%#{keyword}%"
Enum.reduce(columns, query, fn(column, query) ->
if Enum.member?(translatable_columns, column) do
column = Atom.to_string(column)
from q in query, or_where: ilike(fragment("?->?->>?", q.translations, ^locale, ^column), ^keyword)
else
from q in query, or_where: ilike(fragment("?::varchar", field(q, ^column)), ^keyword)
end
end)
end
end
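# A minimal sketch of a service-side query module built from these helpers.
# The `MyApp.Product` schema and its fields are hypothetical, not part of
# BlueJet:
defmodule MyApp.ProductQuery do
  import Ecto.Query

  def default(), do: from(p in MyApp.Product)

  # Scope to an account, filter on whitelisted fields, then sort and paginate.
  def list(account_id, filter) do
    default()
    |> BlueJet.Query.for_account(account_id)
    |> BlueJet.Query.filter_by(filter, [:status, :label])
    |> BlueJet.Query.sort_by(inserted_at: :desc)
    |> BlueJet.Query.paginate(size: 25, number: 1)
  end
end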
# lib/blue_jet/core/query.ex
defmodule ControlNode.Host.SSH do
@moduledoc """
Connects to remote hosts over SSH: executes commands, opens port tunnels,
and uploads files via SFTP.
"""
require Logger
@enforce_keys [:host, :port, :user, :private_key_dir]
defstruct host: nil,
port: 22,
epmd_port: 4369,
user: nil,
private_key_dir: nil,
conn: nil,
hostname: nil,
via_ssh_agent: false
@typedoc """
An SSH spec defines a host to connect to and deploy releases to. The following
fields can be defined:
* `:host` : Remote host uri (eg. `server1.somehost.com`)
* `:port` : SSH port for connecting to the server (default `22`)
* `:epmd_port` : Port where EPMD is expected to be running on `host` (default `4369`)
* `:user` : SSH user name
* `:private_key_dir` : Path to the `.ssh` folder (eg. `/home/user/.ssh`)
* `:via_ssh_agent` : Use SSH Agent for authentication (default `false`)
"""
@type t :: %__MODULE__{
host: binary,
port: integer,
epmd_port: integer,
user: binary,
private_key_dir: binary,
conn: :ssh.connection_ref(),
hostname: binary,
via_ssh_agent: boolean
}
@timeout :infinity
defmodule ExecStatus do
@moduledoc false
@type t :: %__MODULE__{exit_status: atom, exit_code: integer, message: list}
defstruct exit_status: nil, exit_code: nil, message: []
end
alias __MODULE__
@doc """
Connect to SSH host
"""
@spec connect(t) :: t
def connect(ssh_spec) do
with {:ok, connection_ref} <- connect_host(ssh_spec) do
%{ssh_spec | conn: connection_ref}
end
end
# TODO: start checking if `conn: nil` is passed and only then connect
@spec connect_host(t) :: {:ok, :ssh.connection_ref()} | {:error, term()}
defp connect_host(ssh_config) do
ssh_options = [
{:user, to_list(ssh_config.user)},
{:user_dir, to_list(ssh_config.private_key_dir)},
{:user_interaction, false},
{:silently_accept_hosts, true},
{:auth_methods, 'publickey'}
]
ssh_options =
if ssh_config.via_ssh_agent do
[{:key_cb, {:ssh_agent, []}} | ssh_options]
else
ssh_options
end
ssh_config.host
|> to_list()
|> :ssh.connect(ssh_config.port, ssh_options)
end
@doc """
Closes SSH connection to remote host
"""
def disconnect(%{conn: nil} = ssh_config), do: ssh_config
def disconnect(%{conn: conn} = ssh_config) do
try do
:ok = :ssh.close(conn)
catch
error, msg ->
Logger.error("Failed while disconnecting SSH connection #{inspect({error, msg})}")
end
%{ssh_config | conn: nil}
end
@doc """
Establishes an SSH tunnel on the given `port`, i.e. a tunnel from `localhost:port`
to `remote_host:port`. `ssh_config` defines the remote host.
"""
@spec tunnel_port_to_server(t, :inet.port_number()) ::
{:ok, :inet.port_number()} | {:error, any}
def tunnel_port_to_server(ssh_config, port) do
tunnel_port_to_server(ssh_config, port, port)
end
@doc """
Establishes an SSH tunnel from `localhost:local_port` to `remote_host:remote_port`.
`ssh_config` defines the remote host.
"""
@spec tunnel_port_to_server(t, :inet.port_number(), :inet.port_number()) ::
{:ok, :inet.port_number()} | {:error, any}
def tunnel_port_to_server(%{conn: nil}, _local_port, _remote_port), do: {:error, :not_connected}
def tunnel_port_to_server(%SSH{conn: conn} = _ssh_config, local_port, remote_port) do
:ssh.tcpip_tunnel_to_server(conn, '127.0.0.1', local_port, '127.0.0.1', remote_port)
end
@doc """
Executes a given list of commands or a bash script on the host VM.
`skip_eof` : For commands which start long-running processes, `skip_eof` should
be set to `true`. This enables `exec` to return an `ExecStatus` while the
command is left running on the host.
"""
@spec exec(t, list | binary, boolean) :: {:ok, ExecStatus.t()} | :failure | {:error, any}
def exec(ssh_config, commands, skip_eof \\ false) do
do_exec(ssh_config, commands, skip_eof)
end
defp do_exec(ssh_config, commands, skip_eof) when is_list(commands) do
do_exec(ssh_config, Enum.join(commands, "; "), skip_eof)
end
defp do_exec(ssh_config, script, skip_eof) when is_binary(script) do
with {:ok, conn} <- connect_host(ssh_config),
{:ok, channel_id} <- :ssh_connection.session_channel(conn, @timeout),
:success <- :ssh_connection.exec(conn, channel_id, to_list(script), @timeout) do
status = get_exec_status(conn, %ExecStatus{}, skip_eof)
:ssh_connection.close(conn, channel_id)
:ssh.close(conn)
{:ok, status}
end
end
defp get_exec_status(conn, status, skip_eof) do
receive do
{:ssh_cm, ^conn, {:closed, _channel_id}} ->
%{status | message: Enum.reverse(status.message)}
{:ssh_cm, ^conn, {:data, _channel_id, 0, success_msg}} ->
get_exec_status(conn, %{status | message: [success_msg | status.message]}, skip_eof)
{:ssh_cm, ^conn, {:data, _channel_id, 1, error_msg}} ->
get_exec_status(conn, %{status | message: [error_msg | status.message]}, skip_eof)
{:ssh_cm, ^conn, {:exit_status, _channel_id, 0}} ->
if skip_eof do
%{status | exit_status: :success, exit_code: 0}
else
get_exec_status(conn, %{status | exit_status: :success, exit_code: 0}, skip_eof)
end
{:ssh_cm, ^conn, {:exit_status, _channel_id, status_code}} ->
get_exec_status(conn, %{status | exit_status: :failure, exit_code: status_code}, skip_eof)
{:ssh_cm, ^conn, {:eof, _channel_id}} ->
get_exec_status(conn, status, skip_eof)
end
end
@doc """
Uploads `tar_file` to the `host` server via SSH and stores it at `file_path`
on the remote server.
`file_path` should be absolute path on the remote server.
`file_path` is created recursively in case it doesn't exist.
## Example
iex> ssh_config = %SSH{host: "remote-host.com", port: 22, user: "username", private_key_dir: "/home/local_user/.ssh"}
iex> ControlNode.Host.SSH.upload_file(ssh_config, "/opt/remote/server/directory/app.tar.gz", "file_contents_binary")
:ok
"""
@spec upload_file(t, binary, binary) :: :ok
def upload_file(%__MODULE__{port: port} = ssh_config, file_path, tar_file)
when is_integer(port) do
with :ok <- is_absolute_path?(file_path) do
do_upload_file(ssh_config, file_path, tar_file)
:ok
end
end
defp do_upload_file(ssh_config, file_path, tar_file) do
filename = :binary.bin_to_list(file_path)
path = Path.dirname(file_path)
# ensure path exists
with {:ok, conn} <- connect_host(ssh_config),
{:ok, channel_pid} = :ssh_sftp.start_channel(conn) do
^path = do_make_path(channel_pid, path)
:ssh.close(conn)
end
# upload file
with {:ok, conn} <- connect_host(ssh_config),
{:ok, channel_pid} = :ssh_sftp.start_channel(conn) do
:ok = :ssh_sftp.write_file(channel_pid, filename, tar_file)
:ssh.close(conn)
end
end
defp is_absolute_path?(path) do
case Path.type(path) do
:absolute -> :ok
_ -> {:error, :absolute_path_not_provided}
end
end
defp do_make_path(channel_pid, path) do
Path.relative_to(path, "/")
|> Path.split()
|> Enum.reduce("/", fn dir, base_path ->
new_base_path = Path.join(base_path, dir)
# ensure the directory path up to this point exists
:ssh_sftp.opendir(channel_pid, to_list(new_base_path))
|> case do
{:ok, _} ->
:ok
{:error, :no_such_file} ->
:ok = :ssh_sftp.make_dir(channel_pid, to_list(new_base_path))
end
new_base_path
end)
end
defp to_list(bin), do: :binary.bin_to_list(bin)
end
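# A minimal end-to-end sketch (host, user and paths are placeholders):
ssh_spec = %ControlNode.Host.SSH{
  host: "server1.example.com",
  port: 22,
  user: "deploy",
  private_key_dir: "/home/deploy/.ssh"
}

# Run a short script on the remote host and inspect the collected output.
{:ok, status} = ControlNode.Host.SSH.exec(ssh_spec, ["mkdir -p /tmp/app", "ls /tmp"])
status.exit_status # => :success or :failure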
# lib/control_node/host/ssh.ex
defmodule Game.Config do
@moduledoc """
Holds config values in memory so the database is not queried as often.
"""
alias Data.Config
alias Data.Repo
alias Data.Save
alias Data.Stats
@color_config %{
color_home_header: "#268bd2",
color_home_link: "#268bd2",
color_home_link_hover: "#31b5ff",
color_home_primary: "#268bd2",
color_home_primary_hover: "#2a99e7",
color_home_primary_text: "#fff",
color_home_secondary: "#fdf6e3",
color_home_secondary_hover: "#fcf1d5",
color_home_secondary_text: "#657b83",
color_background: "#002b36",
color_text_color: "#93a1a1",
color_panel_border: "#073642",
color_prompt_background: "#fdf6e3",
color_prompt_color: "#586e75",
color_character_info_background: "#073642",
color_character_info_text: "#93a1a1",
color_room_info_background: "#073642",
color_room_info_text: "#93a1a1",
color_room_info_exit: "#93a1a1",
color_stat_block_background: "#eee8d5",
color_health_bar: "#dc322f",
color_health_bar_background: "#fdf6e3",
color_skill_bar: "#859900",
color_skill_bar_background: "#fdf6e3",
color_endurance_bar: "#268bd2",
color_endurance_bar_background: "#fdf6e3",
color_experience_bar: "#6c71c4",
color_black: "#003541",
color_red: "#dc322f",
color_green: "#859900",
color_yellow: "#b58900",
color_blue: "#268bd2",
color_magenta: "#d33682",
color_cyan: "#2aa198",
color_white: "#eee8d5",
color_map_default: "#9cb7ba",
color_map_blue: "#005fd7",
color_map_brown: "#875f00",
color_map_dark_green: "#005f00",
color_map_green: "#00af00",
color_map_grey: "#7e9693",
color_map_light_grey: "#b6c6c6"
}
@basic_stats %{
health_points: 50,
max_health_points: 50,
skill_points: 50,
max_skill_points: 50,
endurance_points: 20,
max_endurance_points: 20,
strength: 10,
agility: 10,
vitality: 10,
intelligence: 10,
awareness: 10
}
@doc false
def start_link() do
Agent.start_link(fn -> %{} end, name: __MODULE__)
end
def color_config(), do: @color_config
@doc """
Reload a config from the database
"""
@spec reload(String.t()) :: any()
def reload(name) do
value = Config.find_config(name)
Agent.update(__MODULE__, &Map.put(&1, name, value))
value
end
def find_config(name) do
case Agent.get(__MODULE__, &Map.get(&1, name, nil)) do
nil ->
reload(name)
value ->
value
end
end
def host() do
ExVenture.config(Application.get_env(:ex_venture, :networking)[:host])
end
def port() do
ExVenture.config(Application.get_env(:ex_venture, :networking)[:port])
end
def ssl?(), do: ssl_port() != nil
def ssl_port() do
port = Keyword.get(Application.get_env(:ex_venture, :networking), :ssl_port, nil)
ExVenture.config(port)
end
@doc """
Number of "ticks" before regeneration occurs
"""
@spec regen_tick_count(integer()) :: integer()
def regen_tick_count(default) do
case find_config("regen_tick_count") do
nil ->
default
regen_tick_count ->
regen_tick_count |> Integer.parse() |> elem(0)
end
end
@doc """
The Game's name
Used in web page titles
"""
@spec game_name(String.t()) :: String.t()
def game_name(default \\ "ExVenture") do
case find_config("game_name") do
nil ->
default
game_name ->
game_name
end
end
@doc """
Message of the Day
Used during sign in
"""
@spec motd(String.t()) :: String.t()
def motd(default \\ "Welcome to ExVenture") do
case find_config("motd") do
nil ->
default
motd ->
motd
end
end
@doc """
Message after signing into the game
Used during sign in
"""
@spec after_sign_in_message(String.t()) :: String.t()
def after_sign_in_message(default \\ "") do
case find_config("after_sign_in_message") do
nil ->
default
motd ->
motd
end
end
@doc """
Starting save
Which room, etc the player will start out with
"""
@spec starting_save() :: map()
def starting_save() do
case find_config("starting_save") do
nil ->
nil
save ->
{:ok, save} = Save.load(Poison.decode!(save))
save
end
end
def starting_room_ids() do
case find_config("starting_room_ids") do
nil ->
[]
room_ids ->
Poison.decode!(room_ids)
end
end
def starting_room_id() do
starting_room_ids()
|> Enum.shuffle()
|> List.first()
end
@doc """
Your pool of random character names to offer to players signing up
"""
@spec character_names() :: [String.t()]
def character_names() do
case find_config("character_names") do
nil ->
[]
names ->
names
|> String.split("\n")
|> Enum.map(&String.trim/1)
end
end
@doc """
Pick a random set of 5 names
"""
@spec random_character_names() :: [String.t()]
def random_character_names() do
character_names()
|> Enum.shuffle()
|> Enum.take(5)
end
@doc """
Remove a name from the list of character names if it was used
"""
@spec claim_character_name(String.t()) :: :ok
def claim_character_name(name) do
case name in character_names() do
true ->
_claim_character_name(name)
false ->
:ok
end
end
defp _claim_character_name(name) do
case Repo.get_by(Config, name: "character_names") do
nil ->
:ok
config ->
names = List.delete(character_names(), name)
changeset = config |> Config.changeset(%{value: Enum.join(names, "\n")})
Repo.update(changeset)
reload("character_names")
end
end
@doc """
Load the game's basic stats
"""
@spec basic_stats() :: map()
def basic_stats() do
case find_config("basic_stats") do
nil ->
@basic_stats
stats ->
{:ok, stats} = Stats.load(Poison.decode!(stats))
stats
end
end
def discord_client_id() do
find_config("discord_client_id")
end
def discord_invite_url() do
find_config("discord_invite_url")
end
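# Define one zero-arity accessor per color key at compile time (e.g.
# `color_red/0`), returning the database override when present and the
# default from @color_config otherwise.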
Enum.each(@color_config, fn {config, default} ->
def unquote(config)() do
case find_config(to_string(unquote(config))) do
nil ->
unquote(default)
color ->
color
end
end
end)
end
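# A minimal usage sketch (assumes the agent is supervised and the database is
# reachable; values are illustrative):
{:ok, _pid} = Game.Config.start_link()
Game.Config.game_name()          # => "ExVenture" unless overridden in the DB
Game.Config.regen_tick_count(7)  # => 7 when "regen_tick_count" is unset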
# lib/game/config.ex
defmodule WebSockex.ApplicationError do
@moduledoc false
defexception [:reason]
def message(%__MODULE__{reason: :not_started}) do
"""
The :websockex application is not started.
Please start the applications with Application.ensure_all_started(:websockex)
"""
end
end
defmodule WebSockex.ConnError do
@moduledoc false
defexception [:original]
def message(%__MODULE__{original: :nxdomain}), do: "Connection Error: Could not resolve domain name."
def message(%__MODULE__{original: error}), do: "Connection Error: #{inspect error}"
end
defmodule WebSockex.RequestError do
defexception [:code, :message]
def message(%__MODULE__{code: code, message: message}) do
"Didn't get a proper response from the server. The response was: #{inspect code} #{inspect message}"
end
end
defmodule WebSockex.URLError do
@moduledoc false
defexception [:url]
def message(%__MODULE__{url: url}), do: "Invalid URL: #{inspect url}"
end
defmodule WebSockex.HandshakeError do
@moduledoc false
defexception [:challenge, :response]
def message(%__MODULE__{challenge: challenge, response: response}) do
["Handshake Failed: Response didn't match challenge.",
"Response: #{inspect response}",
"Challenge: #{inspect challenge}"]
|> Enum.join("\n")
end
end
defmodule WebSockex.BadResponseError do
@moduledoc false
defexception [:response, :module, :function, :args]
def message(%__MODULE__{} = error) do
"Bad Response: Got #{inspect error.response} from #{inspect Exception.format_mfa(error.module, error.function, error.args)}"
end
end
defmodule WebSockex.FrameError do
defexception [:reason, :opcode, :buffer]
def message(%__MODULE__{reason: :nonfin_control_frame} = exception) do
"Fragmented Control Frame: Control Frames Can't Be Fragmented\nbuffer: #{exception.buffer}"
end
def message(%__MODULE__{reason: :control_frame_too_large} = exception) do
"Control Frame Too Large: Control Frames Can't Be Larger Than 125 Bytes\nbuffer: #{exception.buffer}"
end
def message(%__MODULE__{reason: :invalid_utf8} = exception) do
"Invalid UTF-8: Text and Close frames must have UTF-8 payloads.\nbuffer: #{exception.buffer}"
end
def message(%__MODULE__{reason: :invalid_close_code} = exception) do
"Invalid Close Code: Close Codes must be in range of 1000 through 4999\nbuffer: #{exception.buffer}"
end
def message(%__MODULE__{} = exception) do
"Frame Error: #{inspect exception}"
end
end
defmodule WebSockex.FrameEncodeError do
defexception [:reason, :frame_type, :frame_payload, :close_code]
def message(%__MODULE__{reason: :control_frame_too_large} = error) do
"""
Control frame payload too large: Payload must be less than 126 bytes.
Frame: {#{inspect error.frame_type}, #{inspect error.frame_payload}}
"""
end
def message(%__MODULE__{reason: :close_code_out_of_range} = error) do
"""
Close Code Out of Range: Close code must be between 1000-4999.
Frame: {#{inspect error.frame_type}, #{inspect error.close_code}, #{inspect error.frame_payload}}
"""
end
end
defmodule WebSockex.InvalidFrameError do
defexception [:frame]
def message(%__MODULE__{frame: frame}) do
"The following frame is an invalid frame: #{inspect frame}"
end
end
defmodule WebSockex.FragmentParseError do
defexception [:reason, :fragment, :continuation]
def message(%__MODULE__{reason: :two_start_frames} = error) do
"""
Cannot Add Another Start Frame to a Existing Fragment.
Fragment: #{inspect error.fragment}
Continuation: #{inspect error.continuation}
"""
end
end
defmodule WebSockex.NotConnectedError do
defexception [:connection_state]
def message(%__MODULE__{connection_state: :opening}) do
"Not Connected: Currently Opening the Connection."
end
end
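# A small sketch of how these exceptions render (values illustrative):
Exception.message(%WebSockex.ConnError{original: :nxdomain})
# => "Connection Error: Could not resolve domain name."
Exception.message(%WebSockex.URLError{url: "ftp://example.com"})
# => ~s(Invalid URL: "ftp://example.com")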
# lib/websockex/errors.ex
defmodule ExPaint.Image do
@moduledoc false
@type t :: pid
use GenServer
alias ExPaint.{Color, Font}
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts)
end
def destroy(pid) do
GenServer.stop(pid)
end
def dimensions(pid) do
GenServer.call(pid, :dimensions)
end
def clear(pid) do
GenServer.call(pid, :clear)
end
def line(pid, {_x1, _y1} = p1, {_x2, _y2} = p2, %Color{} = color) do
%ExPaint.Line{p1: p1, p2: p2, color: color}
|> send_to(pid)
end
def rect(pid, {_x1, _y1} = p, {_w, _h} = size, %Color{} = color) do
%ExPaint.Rect{p: p, size: size, color: color}
|> send_to(pid)
end
def filled_rect(pid, {_x1, _y1} = p, {_w, _h} = size, %Color{} = color) do
%ExPaint.FilledRect{p: p, size: size, color: color}
|> send_to(pid)
end
def filled_ellipse(pid, {_x1, _y1} = p, {_w, _h} = size, %Color{} = color) do
%ExPaint.FilledEllipse{p: p, size: size, color: color}
|> send_to(pid)
end
def filled_triangle(pid, {_x1, _y1} = p1, {_x2, _y2} = p2, {_x3, _y3} = p3, %Color{} = color) do
%ExPaint.FilledTriangle{p1: p1, p2: p2, p3: p3, color: color}
|> send_to(pid)
end
def polygon(pid, points, %Color{} = color) do
%ExPaint.Polygon{points: points, color: color}
|> send_to(pid)
end
def arc(pid, {_x1, _y1} = p1, {_x2, _y2} = p2, diam, %Color{} = color) do
%ExPaint.Arc{p1: p1, p2: p2, diam: diam, color: color}
|> send_to(pid)
end
def text(pid, {_x1, _y1} = p, %Font{} = font, text, %Color{} = color) do
%ExPaint.Text{p: p, font: font, text: text, color: color}
|> send_to(pid)
end
def primitives(pid) do
GenServer.call(pid, :primitives)
end
defp send_to(primitive, pid) do
GenServer.call(pid, {:add_primitive, primitive})
end
def init(opts) do
with {:width, width} when is_number(width) <- {:width, Keyword.get(opts, :width)},
{:height, height} when is_number(height) <- {:height, Keyword.get(opts, :height)} do
{:ok, %{width: width, height: height, primitives: []}}
else
{missing, _} -> {:stop, "Missing or invalid value for #{missing}"}
end
end
def handle_call(:clear, _from, state) do
{:reply, :ok, %{state | primitives: []}}
end
def handle_call(:dimensions, _from, %{width: width, height: height} = state) do
{:reply, {width, height}, state}
end
def handle_call({:add_primitive, primitive}, _from, state) do
{:reply, :ok, Map.update!(state, :primitives, fn primitives -> [primitive | primitives] end)}
end
def handle_call(:primitives, _from, %{primitives: primitives} = state) do
{:reply, Enum.reverse(primitives), state}
end
end
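# A minimal usage sketch. The bare %ExPaint.Color{} construction is
# illustrative only; the Color struct is defined elsewhere in the library:
{:ok, img} = ExPaint.Image.start_link(width: 64, height: 64)
:ok = ExPaint.Image.rect(img, {4, 4}, {56, 56}, %ExPaint.Color{})
{64, 64} = ExPaint.Image.dimensions(img)
ExPaint.Image.primitives(img) # => primitives in the order they were added
ExPaint.Image.destroy(img)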
# lib/ex_paint/image.ex
defmodule PhoenixChannelClient do
@moduledoc """
Phoenix Channels Client
### Example
```
{:ok, pid} = PhoenixChannelClient.start_link()
{:ok, socket} = PhoenixChannelClient.connect(pid,
host: "localhost",
path: "/socket/websocket",
params: %{token: "something"},
secure: false)
channel = PhoenixChannelClient.channel(socket, "room:public", %{name: "Ryo"})
case PhoenixChannelClient.join(channel) do
{:ok, %{message: message}} -> IO.puts(message)
{:error, %{reason: reason}} -> IO.puts(reason)
:timeout -> IO.puts("timeout")
{:exception, error} -> raise error
end
case PhoenixChannelClient.push_and_receive(channel, "search", %{query: "Elixir"}, 100) do
{:ok, %{result: result}} -> IO.puts("#\{length(result)} items")
{:error, %{reason: reason}} -> IO.puts(reason)
:timeout -> IO.puts("timeout")
{:exception, error} -> raise error
end
receive do
{"new_msg", message} -> IO.puts(message)
:close -> IO.puts("closed")
{:error, error} -> IO.inspect(error)
end
:ok = PhoenixChannelClient.leave(channel)
```
"""
use GenServer
defmodule Channel do
defstruct [:socket, :topic, :params]
end
defmodule Socket do
defstruct [:server_name]
end
defmodule Subscription do
defstruct [:name, :pid, :matcher, :mapper]
end
alias Elixir.Socket.Web, as: WebSocket
@type channel :: %Channel{}
@type socket :: %Socket{}
@type subscription :: %Subscription{}
@type ok_result :: {:ok, term}
@type error_result :: {:error, term}
@type timeout_result :: :timeout
@type exception_result :: {:error, term}
@type result :: ok_result | error_result | timeout_result | exception_result
@type send_result :: :ok | {:error, term}
@type connect_error :: {:error, term}
@default_timeout 5000
@max_timeout 60000 # 1 minute
@phoenix_vsn "1.0.0"
@event_join "phx_join"
@event_reply "phx_reply"
@event_leave "phx_leave"
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, :ok, opts)
end
def start(opts \\ []) do
GenServer.start(__MODULE__, :ok, opts)
end
@doc """
Connects to the specified websocket.
### Options
* `:host`
* `:port` optional
* `:path` optional, "/" by default
* `:params` optional, %{} by default
* `:secure` optional, false by default
### Example
```
PhoenixChannelClient.connect(pid,
host: "localhost",
path: "/socket/websocket",
params: %{token: "something"},
secure: false)
```
"""
@spec connect(term, keyword) :: {:ok, socket} | connect_error
def connect(name, opts) do
case GenServer.call(name, {:connect, opts}) do
:ok -> {:ok, %Socket{server_name: name}}
{:error, reason} -> {:error, reason}
end
end
@doc """
Reconnects to the socket.
"""
@spec reconnect(socket) :: :ok | connect_error
def reconnect(socket) do
GenServer.call(socket.server_name, :reconnect)
end
@doc """
Creates a channel struct.
"""
@spec channel(socket, String.t, map) :: channel
def channel(socket, topic, params \\ %{}) do
%Channel{
socket: socket,
topic: topic,
params: params
}
end
@doc """
Joins to the channel and subscribes messages.
Receives `{event, payload}` or `:close`.
### Example
```
case PhoenixChannelClient.join(channel) do
{:ok, %{message: message}} -> IO.puts(message)
{:error, %{reason: reason}} -> IO.puts(reason)
:timeout -> IO.puts("timeout")
end
receive do
{"new_msg", message} -> IO.puts(message)
:close -> IO.puts("closed")
{:error, error} -> IO.inspect(error)
end
```
"""
@spec join(channel, number) :: result
def join(channel, timeout \\ @default_timeout) do
subscription = channel_subscription_key(channel)
matcher = fn %{topic: topic} ->
topic === channel.topic
end
mapper = fn %{event: event, payload: payload} -> {event, payload} end
subscribe(channel.socket.server_name, subscription, matcher, mapper)
case push_and_receive(channel, @event_join, channel.params, timeout) do
:timeout ->
unsubscribe(channel.socket.server_name, subscription)
:timeout
x -> x
end
end
@doc """
Leaves the channel.
"""
@spec leave(channel, number) :: result
def leave(channel, timeout \\ @default_timeout) do
subscription = channel_subscription_key(channel)
unsubscribe(channel.socket.server_name, subscription)
push_and_receive(channel, @event_leave, %{}, timeout)
end
@doc """
Pushes a message.
### Example
```
case PhoenixChannelClient.push(channel, "new_msg", %{text: "Hello"}) do
:ok -> :ok
{:error, term} -> IO.puts("failed")
end
```
"""
@spec push(channel, String.t, map) :: send_result
def push(channel, event, payload) do
ref = GenServer.call(channel.socket.server_name, :make_ref)
do_push(channel, event, payload, ref)
end
@doc """
Pushes a message and receives a reply.
### Example
```
case PhoenixChannelClient.push_and_receive(channel, "search", %{query: "Elixir"}, 100) do
{:ok, %{result: result}} -> IO.puts("#\{length(result)} items")
{:error, %{reason: reason}} -> IO.puts(reason)
:timeout -> IO.puts("timeout")
end
```
"""
@spec push_and_receive(channel, String.t, map, number) :: result
def push_and_receive(channel, event, payload, timeout \\ @default_timeout) do
ref = GenServer.call(channel.socket.server_name, :make_ref)
subscription = reply_subscription_key(ref)
task = Task.async(fn ->
matcher = fn %{topic: topic, event: msg_event, ref: msg_ref} ->
topic === channel.topic and msg_event === @event_reply and msg_ref === ref
end
mapper = fn %{payload: payload} -> payload end
subscribe(channel.socket.server_name, subscription, matcher, mapper)
case do_push(channel, event, payload, ref) do
:ok ->
receive do
payload ->
case payload do
%{"status" => "ok", "response" => response} ->
{:ok, response}
%{"status" => "error", "response" => response} ->
{:error, response}
end
after
timeout -> :timeout
end
{:error, error} -> {:exception, error}
end
end)
try do
Task.await(task, @max_timeout)
after
unsubscribe(channel.socket.server_name, subscription)
end
end
defp do_push(channel, event, payload, ref) do
obj = %{
topic: channel.topic,
event: event,
payload: payload,
ref: ref
}
json = Poison.encode!(obj)
socket = GenServer.call(channel.socket.server_name, :socket)
WebSocket.send(socket, {:text, json})
end
defp subscribe(name, key, matcher, mapper) do
subscription = %Subscription{name: key, matcher: matcher, mapper: mapper, pid: self()}
GenServer.cast(name, {:subscribe, subscription})
subscription
end
defp unsubscribe(name, %Subscription{name: key}) do
unsubscribe(name, key)
end
defp unsubscribe(name, key) do
GenServer.cast(name, {:unsubscribe, key})
end
defp channel_subscription_key(channel), do: "channel_#{channel.topic}"
defp reply_subscription_key(ref), do: "reply_#{ref}"
defp do_connect(address, opts, state) do
socket = state.socket
if not is_nil(socket) do
WebSocket.close(socket)
end
ensure_loop_killed(state)
case WebSocket.connect(address, opts) do
{:ok, socket} ->
state = schedule_heartbeat(state)
pid = spawn_recv_loop(socket)
state = %{state |
socket: socket,
recv_loop_pid: pid}
{:reply, :ok, state}
{:error, error} ->
{:reply, {:error, error}, state}
end
end
defp schedule_heartbeat(state) do
ref = Process.send_after(self(), :heartbeat, state.heartbeat_interval)
%{state | heartbeat_ref: ref}
end
@sleep_time_on_error 100
defp spawn_recv_loop(socket) do
pid = self()
spawn_link(fn ->
for _ <- Stream.cycle([:ok]) do
case WebSocket.recv(socket) do
{:ok, {:text, data}} ->
send pid, {:text, data}
{:ok, {:ping, _}} ->
WebSocket.send!(socket, {:pong, ""})
{:ok, {:close, _, _}} ->
send pid, :close
{:ok, {:pong, _}} ->
:noop
{:error, error} ->
send pid, {:error, error}
:timer.sleep(@sleep_time_on_error)
end
end
end)
end
def ensure_loop_killed(state) do
ref = state.heartbeat_ref
if not is_nil(ref) do
Process.cancel_timer(ref)
end
pid = state.recv_loop_pid
if not is_nil(pid) do
Process.unlink(pid)
Process.exit(pid, :kill)
end
end
# Callbacks
def init(_opts) do
initial_state = %{
ref: 0,
socket: nil,
recv_loop_pid: nil,
subscriptions: %{},
connection_address: nil,
connection_opts: nil,
heartbeat_interval: nil,
heartbeat_ref: nil
}
{:ok, initial_state}
end
def handle_call({:connect, opts}, _from, state) do
{host, opts} = Keyword.pop(opts, :host)
{port, opts} = Keyword.pop(opts, :port)
{path, opts} = Keyword.pop(opts, :path, "/")
{params, opts} = Keyword.pop(opts, :params, %{})
{heartbeat_interval, opts} = Keyword.pop(opts, :heartbeat_interval, 30_000)
params = Map.put(params, :vsn, @phoenix_vsn) |> URI.encode_query()
path = "#{path}?#{params}"
opts = Keyword.put(opts, :path, path)
address = if not is_nil(port) do
{host, port}
else
host
end
state = %{state |
connection_address: address,
connection_opts: opts,
heartbeat_interval: heartbeat_interval}
do_connect(address, opts, state)
end
def handle_call(:reconnect, _from, state) do
%{
connection_address: address,
connection_opts: opts
} = state
do_connect(address, opts, state)
end
def handle_call(:make_ref, _from, state) do
ref = state.ref
state = Map.update!(state, :ref, &(&1 + 1))
{:reply, ref, state}
end
def handle_call(:socket, _from, state) do
{:reply, state.socket, state}
end
def handle_cast({:subscribe, subscription}, state) do
state = put_in(state, [:subscriptions, subscription.name], subscription)
{:noreply, state}
end
def handle_cast({:unsubscribe, key}, state) do
state = Map.update!(state, :subscriptions, fn subscriptions ->
Map.delete(subscriptions, key)
end)
{:noreply, state}
end
def handle_info({:text, json}, state) do
%{
"event" => event,
"topic" => topic,
"payload" => payload,
"ref" => ref
} = Poison.decode!(json)
obj = %{
event: event,
topic: topic,
payload: payload,
ref: ref
}
filter = fn {_key, %Subscription{matcher: matcher}} ->
matcher.(obj)
end
mapper = fn {_key, %Subscription{pid: pid, mapper: mapper}} ->
{pid, mapper.(obj)}
end
sender = fn {pid, message} ->
send pid, message
end
state.subscriptions
|> Flow.from_enumerable()
|> Flow.filter(filter)
|> Flow.map(mapper)
|> Flow.map(sender)
|> Flow.run()
{:noreply, state}
end
def handle_info(:close, state) do
ensure_loop_killed(state)
Enum.map(state.subscriptions, fn {_key, %Subscription{pid: pid}} ->
spawn_link(fn ->
send pid, :close
end)
end)
{:noreply, state}
end
def handle_info({:error, error}, state) do
Enum.map(state.subscriptions, fn {_key, %Subscription{pid: pid}} ->
spawn_link(fn ->
send pid, {:error, error}
end)
end)
{:noreply, state}
end
def handle_info(:heartbeat, state) do
Elixir.Socket.Web.send!(state.socket, {:ping, Poison.encode!(%{topic: "phoenix", event: "heartbeat", payload: %{}})})
state = schedule_heartbeat(state)
{:noreply, state}
end
def terminate(reason, state) do
ensure_loop_killed(state)
socket = state.socket
if not is_nil(socket) do
WebSocket.abort(socket)
end
reason
end
end
# lib/phoenix_channel_client.ex
defmodule Mix.Tasks.Compile.Erlang do
alias :epp, as: Epp
alias :digraph, as: Graph
alias :digraph_utils, as: GraphUtils
use Mix.Task
@hidden true
@shortdoc "Compile Erlang source files"
@recursive true
@manifest ".compile.erlang"
@moduledoc """
A task to compile Erlang source files.
When this task runs, it will first check the modification times of
all files to be compiled and if they haven't been
changed since the last compilation, it will not compile
them. If any of them have changed, it compiles
everything.
For this reason, the task touches your `:compile_path`
directory and sets the modification time to the current
time and date at the end of each compilation. You can
force compilation regardless of modification times by passing
the `--force` option.
## Command line options
* `--force` - forces compilation regardless of modification times
## Configuration
* `ERL_COMPILER_OPTIONS` - can be used to give default compile options.
The value must be a valid Erlang term. If the value is a list, it will
be used as is. If it is not a list, it will be put into a list.
* `:erlc_paths` - directories to find source files.
Defaults to `["src"]`, can be configured as:
```
[erlc_paths: ["src", "other"]]
```
* `:erlc_include_path` - directory for adding include files.
Defaults to `"include"`, can be configured as:
```
[erlc_include_path: "other"]
```
* `:erlc_options` - compilation options that apply to Erlang's
compiler. `:debug_info` is enabled by default.
There are many available options here:
http://www.erlang.org/doc/man/compile.html#file-2
"""
defrecord Erl, file: nil, module: nil, behaviours: [], compile: [],
includes: [], mtime: nil, invalid: false
@doc """
Runs this task.
"""
def run(args) do
{ opts, _, _ } = OptionParser.parse(args, switches: [force: :boolean])
project = Mix.project
source_paths = project[:erlc_paths]
files = Mix.Utils.extract_files(source_paths, [:erl])
compile_path = to_erl_file project[:compile_path]
include_path = to_erl_file project[:erlc_include_path]
erlc_options = project[:erlc_options] || []
erlc_options = erlc_options ++ [{:outdir, compile_path}, {:i, include_path}, :report]
erlc_options = Enum.map erlc_options, fn
{ kind, dir } when kind in [:i, :outdir] ->
{ kind, to_erl_file(dir) }
opt ->
opt
end
tuples = files
|> scan_sources(include_path, source_paths)
|> sort_dependencies
|> Enum.map(annotate_target(&1, compile_path, opts[:force]))
compile_mappings(manifest(), tuples, fn
input, _output ->
file = to_erl_file(Path.rootname(input, ".erl"))
:compile.file(file, erlc_options)
end)
end
@doc """
Returns Erlang manifests.
"""
def manifests, do: [manifest]
defp manifest, do: Path.join(Mix.project[:compile_path], @manifest)
@doc """
Extracts the extensions from the mappings, automatically
invoking the callback for each stale input and output pair
(or for all if `force` is true) and removing files that no
longer have a source, while keeping the manifest up
to date.
## Examples
For example, a simple compiler for Lisp Flavored Erlang
would be implemented like:
compile_mappings "ebin/.compile.lfe",
[{ "src", "ebin" }],
:lfe, :beam, opts[:force], fn
input, output ->
lfe_comp:file(to_erl_file(input),
[output_dir: Path.dirname(output)])
end
The command above will:
1. Look for files ending with the `lfe` extension in `src`
and their `beam` counterpart in `ebin`;
2. For each stale file (or for all if `force` is true),
invoke the callback passing the calculated input
and output;
3. Update the manifest with the newly compiled outputs;
4. Remove any output in the manifest that does not
have an equivalent source;
The callback must return `{ :ok, mod }` or `:error` in case
of error. An error is raised at the end if any of the
files failed to compile.
"""
def compile_mappings(manifest, mappings, src_ext, dest_ext, force, callback) do
files = lc { src, dest } inlist mappings do
extract_targets(src, src_ext, dest, dest_ext, force)
end |> Enum.concat
compile_mappings(manifest, files, callback)
end
@doc """
Converts the given file to a format accepted by
the Erlang compilation tools.
"""
def to_erl_file(file) do
to_char_list(file)
end
## Internal helpers
defp scan_sources(files, include_path, source_paths) do
include_paths = [include_path | source_paths]
Enum.reduce(files, [], scan_source(&2, &1, include_paths)) |> Enum.reverse
end
defp scan_source(acc, file, include_paths) do
erl_file = Erl[file: file, module: module_from_artifact(file)]
case Epp.parse_file(to_erl_file(file), include_paths, []) do
{ :ok, forms } ->
[List.foldl(tl(forms), erl_file, do_form(file, &1, &2)) | acc]
{ :error, _error } ->
acc
end
end
defp do_form(file, form, Erl[] = erl) do
case form do
{:attribute, _, :file, {include_file, _}} when file != include_file ->
if File.regular?(include_file) do
erl.update_includes [include_file|&1]
else
erl
end
{:attribute, _, :behaviour, behaviour} ->
erl.update_behaviours [behaviour|&1]
{:attribute, _, :compile, value} ->
erl.update_compile [value|&1]
_ ->
erl
end
end
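# Topologically sort the modules so behaviours and parse transforms compile
# before the modules that depend on them; fall back to the original order
# when the dependency graph is cyclic.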
defp sort_dependencies(erls) do
graph = Graph.new
lc erl inlist erls do
Graph.add_vertex(graph, erl.module, erl)
end
lc erl inlist erls do
lc b inlist erl.behaviours, do: Graph.add_edge(graph, b, erl.module)
lc c inlist erl.compile do
case c do
{:parse_transform, transform} -> Graph.add_edge(graph, transform, erl.module)
_ -> :ok
end
end
end
result =
case GraphUtils.topsort(graph) do
false -> erls
mods ->
lc m inlist mods, do: elem(Graph.vertex(graph, m), 1)
end
Graph.delete(graph)
result
end
defp annotate_target(erl, compile_path, force) do
beam = Path.join(compile_path, "#{erl.module}#{:code.objfile_extension}")
if force || Mix.Utils.stale?([erl.file|erl.includes], [beam]) do
{ erl.file, erl.module, beam }
else
{ erl.file, erl.module, nil }
end
end
defp module_from_artifact(artifact) do
artifact |> Path.basename |> Path.rootname
end
defp extract_targets(dir1, src_ext, dir2, dest_ext, force) do
files = Mix.Utils.extract_files([dir1], List.wrap(src_ext))
lc file inlist files do
module = module_from_artifact(file)
target = Path.join(dir2, module <> "." <> to_string(dest_ext))
if force || Mix.Utils.stale?([file], [target]) do
{ file, module, target }
else
{ file, module, nil }
end
end
end
defp compile_mappings(manifest, tuples, callback) do
# Stale files are the ones with a destination
stale = lc { src, _mod, dest } inlist tuples, dest != nil, do: { src, dest }
# Get the previous entries from the manifest
entries = Mix.Utils.read_manifest(manifest)
# Files to remove are the ones in the
# manifest but they no longer have a source
removed = Enum.filter(entries, fn entry ->
module = module_from_artifact(entry)
not Enum.any?(tuples, fn { _src, mod, _dest } -> module == mod end)
end)
if stale == [] && removed == [] do
:noop
else
File.mkdir_p!(Path.dirname(manifest))
# Remove manifest entries with no source
Enum.each(removed, File.rm(&1))
# Compile stale files and print the results
results = lc { input, output } inlist stale do
interpret_result(input, callback.(input, output))
end
# Write final entries to manifest
entries = (entries -- removed) ++ Enum.map(stale, elem(&1, 1))
Mix.Utils.write_manifest(manifest, :lists.usort(entries))
# Raise if any error, return :ok otherwise
if Enum.any?(results, &1 == :error), do: raise CompileError
:ok
end
end
defp interpret_result(file, result) do
case result do
{ :ok, _ } -> Mix.shell.info "Compiled #{file}"
:error -> :error
end
result
end
end
# lib/mix/lib/mix/tasks/compile.erlang.ex
defmodule Xema.JsonSchema do
@moduledoc """
Converts a JSON Schema to Xema source.
"""
alias Xema.{
JsonSchema.Validator,
Schema,
SchemaError
}
@type json_schema :: true | false | map
@type opts :: [draft: String.t()]
@drafts ~w(draft4 draft6 draft7)
@schema ~w(
additional_items
additional_properties
property_names
not
if
then
else
contains
items
)a
@schemas ~w(
all_of
any_of
one_of
items
)a
@schema_map ~w(
definitions
pattern_properties
properties
)a
@keywords Schema.keywords()
|> Enum.map(&to_string/1)
|> ConvCase.to_camel_case()
|> List.delete("ref")
|> List.delete("schema")
|> Enum.concat(["$ref", "$id", "$schema"])
@doc """
This function converts a JSON Schema in Xema schema source. The argument
`json_schema` is expected as a decoded JSON Schema.
All keys that are not standard JSON Schema keywords have to be known atoms. If
the schema has additional keys that are not existing atoms, the option
`atom: :force` is needed. In this case the atoms will be created. This is not
needed for keys expected by JSON Schema (e.g. in properties).
Options:
* `:draft` specifies the draft to check the given JSON Schema. Possible values
are `"draft4"`, `"draft6"`, and `"draft7"`, default is `"draft7"`. If
`:draft` not set and the schema contains `$schema` then the value for
`$schema` is used for this option.
* `:atom` creates atoms for keys that are not existing atoms when set to `:force`. This is just
needed for additional JSON Schema keywords.
## Examples
iex> Xema.JsonSchema.to_xema(%{"type" => "integer", "minimum" => 5})
{:integer, [minimum: 5]}
iex> schema = %{
...> "type" => "object",
...> "properties" => %{"foo" => %{"type" => "integer"}}
...> }
iex> Xema.JsonSchema.to_xema(schema)
{:map, [properties: %{"foo" => :integer}]}
iex> Xema.JsonSchema.to_xema(%{"type" => "integer", "foo" => "bar"}, atom: :force)
{:integer, [foo: "bar"]}
"""
@spec to_xema(json_schema, opts) :: atom | tuple
def to_xema(json_schema, opts \\ []) do
draft = draft(json_schema, opts)
case Validator.validate(draft, json_schema) do
:ok ->
do_to_xema(json_schema, opts)
{:error, :unknown} ->
raise "unknown draft #{inspect(draft)}, has to be one of #{inspect(@drafts)}"
{:error, reason} ->
raise SchemaError, reason
end
end
defp do_to_xema(json, opts) when is_map(json) do
{type, json} = type(json)
case Enum.empty?(json) do
true -> type
false -> {type, schema(json, opts)}
end
end
defp do_to_xema(json, _) when is_boolean(json), do: json
defp type(map) do
{type, map} = Map.pop(map, "type", :any)
{type_to_atom(type), map}
end
defp draft(json_schema, opts) when is_map(json_schema) do
draft = Map.get(json_schema, "$schema", "draft7")
Keyword.get(opts, :draft, draft)
end
defp draft(_json_schema, opts), do: Keyword.get(opts, :draft, "draft7")
defp type_to_atom(list) when is_list(list), do: Enum.map(list, &type_to_atom/1)
defp type_to_atom("object"), do: :map
defp type_to_atom("array"), do: :list
defp type_to_atom("null"), do: nil
defp type_to_atom(type) when is_binary(type), do: to_existing_atom(type)
defp type_to_atom(type), do: type
defp schema(json, opts) do
json
|> Enum.map(&rule(&1, opts))
|> Keyword.new()
end
# handles all rules with a regular keyword
defp rule({key, value}, opts) when key in @keywords do
key
|> String.trim_leading("$")
|> ConvCase.to_snake_case()
|> to_existing_atom(opts)
|> rule(value, opts)
end
# handles all rules without a regular keyword
defp rule({key, value}, opts) when is_binary(key) and is_map(value) do
value =
case schema?(value) do
true -> do_to_xema(value, opts)
false -> schema(value, opts)
end
{to_existing_atom(key, opts), value}
end
defp rule({key, value}, opts), do: {to_existing_atom(key, opts), value}
defp rule(:format, value, _) do
{:format, value |> ConvCase.to_snake_case() |> to_existing_atom()}
end
defp rule(:dependencies, value, opts) do
value =
Enum.into(value, %{}, fn
{key, value} when is_map(value) -> {key, do_to_xema(value, opts)}
{key, value} -> {key, value}
end)
{:dependencies, value}
end
defp rule(key, value, opts) when key in @schema_map do
{key, Enum.into(value, %{}, fn {key, value} -> {key, do_to_xema(value, opts)} end)}
end
defp rule(key, value, opts) when key in @schemas and is_list(value) do
{key, Enum.map(value, &do_to_xema(&1, opts))}
end
defp rule(key, value, opts) when key in @schema do
{key, do_to_xema(value, opts)}
end
defp rule(key, value, _), do: {key, value}
defp schema?(value) do
value |> Map.keys() |> Enum.any?(fn key -> Enum.member?(@keywords, key) end)
end
defp to_existing_atom(str, opts \\ []) do
case Keyword.get(opts, :atom, :existing) do
:existing -> String.to_existing_atom(str)
:force -> String.to_atom(str)
end
rescue
_ ->
reraise SchemaError,
"All additional schema keys must be existing atoms. Missing atom for #{str}",
__STACKTRACE__
end
end
# lib/xema/json_schema.ex
defmodule Absinthe.Type.Enum do
@moduledoc """
Used to define an enum type, a special scalar that can only have a defined set
of values.
See the `t` type below for details and examples.
## Examples
Given a type defined as the following (see `Absinthe.Schema.Notation`):
```
@desc "The selected color channel"
enum :color_channel do
value :red, as: :r, description: "Color Red"
value :green, as: :g, description: "Color Green"
value :blue, as: :b, description: "Color Blue"
value :alpha, as: :a, deprecate: "We no longer support opacity settings", description: "Alpha Channel"
end
```
The "ColorChannel" type (referred inside Absinthe as `:color_channel`) is an
Enum type, with values with names "red", "green", "blue", and "alpha" that map
to internal, raw values `:r`, `:g`, `:b`, and `:a`. The alpha color channel
is deprecated, just as fields and arguments can be.
You can omit the raw `value` if you'd like it to be the same as the
identifier. For instance, in this example the `value` is automatically set to
`:red`:
```
enum :color_channel do
description "The selected color channel"
value :red, description: "Color Red"
value :green, description: "Color Green"
value :blue, description: "Color Blue"
value :alpha, deprecate: "We no longer support opacity settings", description: "Alpha Channel"
end
```
If you really want to use a shorthand, skipping support for descriptions,
custom raw values, and deprecation, you can just provide a list of atoms:
```
enum :color_channel, values: [:red, :green, :blue, :alpha]
```
Keep in mind that writing a terse definition that skips descriptions and
deprecations today may hamper tooling that relies on introspection tomorrow.
"""
use Absinthe.Introspection.Kind
alias Absinthe.{Blueprint, Type}
@typedoc """
A defined enum type.
Should be defined using `Absinthe.Schema.Notation.enum/2`.
* `:name` - The name of the enum type. Should be a TitleCased `binary`. Set automatically.
* `:description` - A nice description for introspection.
* `:values` - The enum values, usually provided using the `Absinthe.Schema.Notation.values/1` or `Absinthe.Schema.Notation.value/1` macro.
The `__private__` and `:__reference__` fields are for internal use.
"""
@type t :: %__MODULE__{
name: binary,
description: binary,
values: %{binary => Type.Enum.Value.t()},
identifier: atom,
__private__: Keyword.t(),
__reference__: Type.Reference.t()
}
defstruct name: nil,
description: nil,
identifier: nil,
values: %{},
values_by_internal_value: %{},
values_by_name: %{},
__private__: [],
__reference__: nil
def build(%{attrs: attrs}) do
raw_values = attrs[:values] || []
values = Type.Enum.Value.build(raw_values)
internal_values = Type.Enum.Value.build(raw_values, :value)
values_by_name = Type.Enum.Value.build(raw_values, :name)
attrs =
attrs
|> Keyword.put(:values, values)
|> Keyword.put(:values_by_internal_value, internal_values)
|> Keyword.put(:values_by_name, values_by_name)
quote do
%unquote(__MODULE__){
unquote_splicing(attrs)
}
end
end
# Get the internal representation of an enum value
@doc false
@spec parse(t, any) :: any
def parse(enum, %Blueprint.Input.Enum{value: external_value}) do
Map.fetch(enum.values_by_name, external_value)
end
def parse(_, _) do
:error
end
# Get the external representation of an enum value
@doc false
@spec serialize(t, any) :: binary
def serialize(enum, internal_value) do
Map.fetch!(enum.values_by_internal_value, internal_value).name
end
end
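# Sketch of the internal <-> external mapping, assuming the :color_channel
# enum from the moduledoc above has been built into `enum_type`:
#
#     Absinthe.Type.Enum.serialize(enum_type, :r)
#     # => "red" (the external name for the raw :r value)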
# lib/absinthe/type/enum.ex
defmodule WHATWG.Infra do
@moduledoc """
Functions for [Infra Standard](https://infra.spec.whatwg.org).
"""
@doc """
Returns `true` if `term` is an integer for a surrogate; otherwise returns `false`.
> A **surrogate** is a code point that is in the range U+D800 to U+DFFF, inclusive.
"""
defguard is_surrogate(term) when term in 0xD800..0xDFFF
@doc """
Returns `true` if `term` is an integer for a scalar value; otherwise returns `false`.
> A **scalar value** is a code point that is not a surrogate.
"""
defguard is_scalar_value(term) when not is_surrogate(term)
@doc """
Returns `true` if `term` is an integer for a noncharacter; otherwise returns `false`.
> A **noncharacter** is a code point that is in the range U+FDD0 to U+FDEF, inclusive, or U+FFFE, U+FFFF, U+1FFFE, U+1FFFF, U+2FFFE, U+2FFFF, U+3FFFE, U+3FFFF, U+4FFFE, U+4FFFF, U+5FFFE, U+5FFFF, U+6FFFE, U+6FFFF, U+7FFFE, U+7FFFF, U+8FFFE, U+8FFFF, U+9FFFE, U+9FFFF, U+AFFFE, U+AFFFF, U+BFFFE, U+BFFFF, U+CFFFE, U+CFFFF, U+DFFFE, U+DFFFF, U+EFFFE, U+EFFFF, U+FFFFE, U+FFFFF, U+10FFFE, or U+10FFFF.
"""
defguard is_noncharacter(term)
when term in 0xFDD0..0xFDEF or
term in [
0xFFFE,
0xFFFF,
0x1FFFE,
0x1FFFF,
0x2FFFE,
0x2FFFF,
0x3FFFE,
0x3FFFF,
0x4FFFE,
0x4FFFF,
0x5FFFE,
0x5FFFF,
0x6FFFE,
0x6FFFF,
0x7FFFE,
0x7FFFF,
0x8FFFE,
0x8FFFF,
0x9FFFE,
0x9FFFF,
0xAFFFE,
0xAFFFF,
0xBFFFE,
0xBFFFF,
0xCFFFE,
0xCFFFF,
0xDFFFE,
0xDFFFF,
0xEFFFE,
0xEFFFF,
0xFFFFE,
0xFFFFF,
0x10FFFE,
0x10FFFF
]
@doc """
Returns `true` if `term` is an integer for an ASCII code point; otherwise returns `false`.
> An **ASCII code point** is a code point in the range U+0000 NULL to U+007F DELETE, inclusive.
"""
defguard is_ascii_code_point(term) when term in 0x00..0x7F
@doc """
Returns `true` if `term` is an integer for an ASCII tab or newline; otherwise returns `false`.
> An **ASCII tab or newline** is U+0009 TAB, U+000A LF, or U+000D CR.
"""
defguard is_ascii_tab_or_newline(term) when term in [0x09, 0x0A, 0x0D]
@doc """
Returns `true` if `term` is an integer for ASCII whitespace; otherwise returns `false`.
> **ASCII whitespace** is U+0009 TAB, U+000A LF, U+000C FF, U+000D CR, or U+0020 SPACE.
"""
defguard is_ascii_whitespace(term) when term in [0x09, 0x0A, 0x0C, 0x0D, 0x20]
@doc """
Returns `true` if `term` is an integer for a C0 control; otherwise returns `false`.
> A **C0 control** is a code point in the range U+0000 NULL to U+001F INFORMATION SEPARATOR ONE, inclusive.
"""
defguard is_c0_control(term) when term in 0x00..0x1F
@doc """
Returns `true` if `term` is an integer for an U+0020 SPACE; otherwise returns `false`.
"""
defguard is_space(term) when term == 0x20
@doc """
Returns `true` if `term` is an integer for a control; otherwise returns `false`.
> A **control** is a C0 control or a code point in the range U+007F DELETE to U+009F APPLICATION PROGRAM COMMAND, inclusive.
"""
defguard is_control(term) when is_c0_control(term) or term in 0x007F..0x009F
@doc """
Returns `true` if `term` is an integer for an ASCII digit; otherwise returns `false`.
> An **ASCII digit** is a code point in the range U+0030 (0) to U+0039 (9), inclusive.
"""
defguard is_ascii_digit(term) when term in '0123456789'
@doc """
Returns `true` if `term` is an integer for an ASCII upper hex digit; otherwise returns `false`.
> An **ASCII upper hex digit** is an ASCII digit or a code point in the range U+0041 (A) to U+0046 (F), inclusive.
"""
defguard is_ascii_upper_hex_digit(term) when is_ascii_digit(term) or term in 'ABCDEF'
@doc """
Returns `true` if `term` is an integer for an ASCII lower hex digit; otherwise returns `false`.
> An **ASCII lower hex digit** is an ASCII digit or a code point in the range U+0061 (a) to U+0066 (f), inclusive.
"""
defguard is_ascii_lower_hex_digit(term) when is_ascii_digit(term) or term in 'abcdef'
@doc """
Returns `true` if `term` is an integer for an ASCII hex digit; otherwise returns `false`.
> An **ASCII hex digit** is an ASCII upper hex digit or ASCII lower hex digit.
"""
defguard is_ascii_hex_digit(term)
when is_ascii_upper_hex_digit(term) or is_ascii_lower_hex_digit(term)
@doc """
Returns `true` if `term` is an integer for an ASCII upper alpha; otherwise returns `false`.
> An **ASCII upper alpha** is a code point in the range U+0041 (A) to U+005A (Z), inclusive.
"""
defguard is_ascii_upper_alpha(term) when term in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
@doc """
Returns `true` if `term` is an integer for an ASCII lower alpha; otherwise returns `false`.
> An **ASCII lower alpha** is a code point in the range U+0061 (a) to U+007A (z), inclusive.
"""
defguard is_ascii_lower_alpha(term) when term in 'abcdefghijklmnopqrstuvwxyz'
@doc """
Returns `true` if `term` is an integer for an ASCII alpha; otherwise returns `false`.
> An **ASCII alpha** is an ASCII upper alpha or ASCII lower alpha.
"""
defguard is_ascii_alpha(term) when is_ascii_upper_alpha(term) or is_ascii_lower_alpha(term)
@doc """
Returns `true` if `term` is an integer for an ASCII alphanumeric; otherwise returns `false`.
> An **ASCII alphanumeric** is an ASCII digit or ASCII alpha.
"""
defguard is_ascii_alphanumeric(term) when is_ascii_digit(term) or is_ascii_alpha(term)
end
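# A minimal sketch using these guards in function heads; the module name is
# hypothetical. defguards are importable, so they work directly in `when`:
defmodule MyApp.CodePointClassifier do
  import WHATWG.Infra

  # Clause order matters: whitespace code points are matched before controls.
  def classify(cp) when is_ascii_whitespace(cp), do: :whitespace
  def classify(cp) when is_ascii_digit(cp), do: :digit
  def classify(cp) when is_ascii_alpha(cp), do: :alpha
  def classify(cp) when is_control(cp), do: :control
  def classify(_cp), do: :other
end
# MyApp.CodePointClassifier.classify(?A) # => :alpha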
# lib/whatwg/infra.ex
defmodule Faker.Name.En do
import Faker, only: [sampler: 2]
@moduledoc """
Functions for name data in English
"""
@doc """
Returns a complete name (may include a suffix/prefix or both)
## Examples
iex> Faker.Name.En.name()
"Mrs. <NAME> MD"
iex> Faker.Name.En.name()
"<NAME>"
iex> Faker.Name.En.name()
"Mr. <NAME>"
iex> Faker.Name.En.name()
"<NAME> MD"
"""
@spec name() :: String.t()
def name, do: name(Faker.random_between(0, 9))
defp name(0), do: "#{prefix()} #{first_name()} #{last_name()} #{suffix()}"
defp name(1), do: "#{prefix()} #{first_name()} #{last_name()}"
defp name(2), do: "#{first_name()} #{last_name()} #{suffix()}"
defp name(n) when is_integer(n) do
"#{first_name()} #{last_name()}"
end
@doc """
Returns a random first name
## Examples
iex> Faker.Name.En.first_name()
"Joany"
iex> Faker.Name.En.first_name()
"Elizabeth"
iex> Faker.Name.En.first_name()
"Abe"
iex> Faker.Name.En.first_name()
"Ozella"
"""
@spec first_name() :: String.t()
sampler(:first_name, [
"Aaliyah",
"Aaron",
"Abagail",
"Abbey",
"Abbie",
"Abbigail",
"Abby",
"Abdiel",
"Abdul",
"Abdullah",
"Abe",
"Abel",
"Abelardo",
"Abigail",
"Abigale",
"Abigayle",
"Abner",
"Abraham",
"Ada",
"Adah",
"Adalberto",
"Adaline",
"Adam",
"Adan",
"Addie",
"Addison",
"Adela",
"Adelbert",
"Adele",
"Adelia",
"Adeline",
"Adell",
"Adella",
"Adelle",
"Aditya",
"Adolf",
"Adolfo",
"Adolph",
"Adolphus",
"Adonis",
"Adrain",
"Adrian",
"Adriana",
"Adrianna",
"Adriel",
"Adrien",
"Adrienne",
"Afton",
"Aglae",
"Agnes",
"Agustin",
"Agustina",
"Ahmad",
"Ahmed",
"Aida",
"Aidan",
"Aiden",
"Aileen",
"Aimee",
"Aisha",
"Aiyana",
"Akeem",
"Al",
"Alaina",
"Alan",
"Alana",
"Alanis",
"Alanna",
"Alayna",
"Alba",
"Albert",
"Alberta",
"Albertha",
"Alberto",
"Albin",
"Albina",
"Alda",
"Alden",
"Alec",
"Aleen",
"Alejandra",
"Alejandrin",
"Alek",
"Alena",
"Alene",
"Alessandra",
"Alessandro",
"Alessia",
"Aletha",
"Alex",
"Alexa",
"Alexander",
"Alexandra",
"Alexandre",
"Alexandrea",
"Alexandria",
"Alexandrine",
"Alexandro",
"Alexane",
"Alexanne",
"Alexie",
"Alexis",
"Alexys",
"Alexzander",
"Alf",
"Alfonso",
"Alfonzo",
"Alford",
"Alfred",
"Alfreda",
"Alfredo",
"Ali",
"Alia",
"Alice",
"Alicia",
"Alisa",
"Alisha",
"Alison",
"Alivia",
"Aliya",
"Aliyah",
"Aliza",
"Alize",
"Allan",
"Allen",
"Allene",
"Allie",
"Allison",
"Ally",
"Alphonso",
"Alta",
"Althea",
"Alva",
"Alvah",
"Alvena",
"Alvera",
"Alverta",
"Alvina",
"Alvis",
"Alyce",
"Alycia",
"Alysa",
"Alysha",
"Alyson",
"Alysson",
"Amalia",
"Amanda",
"Amani",
"Amara",
"Amari",
"Amaya",
"Amber",
"Ambrose",
"Amelia",
"Amelie",
"Amely",
"America",
"Americo",
"Amie",
"Amina",
"Amir",
"Amira",
"Amiya",
"Amos",
"Amparo",
"Amy",
"Amya",
"Ana",
"Anabel",
"Anabelle",
"Anahi",
"Anais",
"Anastacio",
"Anastasia",
"Anderson",
"Andre",
"Andreane",
"Andreanne",
"Andres",
"Andrew",
"Andy",
"Angel",
"Angela",
"Angelica",
"Angelina",
"Angeline",
"Angelita",
"Angelo",
"Angie",
"Angus",
"Anibal",
"Anika",
"Anissa",
"Anita",
"Aniya",
"Aniyah",
"Anjali",
"Anna",
"Annabel",
"Annabell",
"Annabelle",
"Annalise",
"Annamae",
"Annamarie",
"Anne",
"Annetta",
"Annette",
"Annie",
"Ansel",
"Ansley",
"Anthony",
"Antoinette",
"Antone",
"Antonetta",
"Antonette",
"Antonia",
"Antonietta",
"Antonina",
"Antonio",
"Antwan",
"Antwon",
"Anya",
"April",
"Ara",
"Araceli",
"Aracely",
"Arch",
"Archibald",
"Ardella",
"Arden",
"Ardith",
"Arely",
"Ari",
"Ariane",
"Arianna",
"Aric",
"Ariel",
"Arielle",
"Arjun",
"Arlene",
"Arlie",
"Arlo",
"Armand",
"Armando",
"Armani",
"Arnaldo",
"Arne",
"Arno",
"Arnold",
"Arnoldo",
"Arnulfo",
"Aron",
"Art",
"Arthur",
"Arturo",
"Arvel",
"Arvid",
"Arvilla",
"Aryanna",
"Asa",
"Asha",
"Ashlee",
"Ashleigh",
"Ashley",
"Ashly",
"Ashlynn",
"Ashton",
"Ashtyn",
"Asia",
"Assunta",
"Astrid",
"Athena",
"Aubree",
"Aubrey",
"Audie",
"Audra",
"Audreanne",
"Audrey",
"August",
"Augusta",
"Augustine",
"Augustus",
"Aurelia",
"Aurelie",
"Aurelio",
"Aurore",
"Austen",
"Austin",
"Austyn",
"Autumn",
"Ava",
"Avery",
"Avis",
"Axel",
"Ayana",
"Ayden",
"Ayla",
"Aylin",
"Baby",
"Bailee",
"Bailey",
"Barbara",
"Barney",
"Baron",
"Barrett",
"Barry",
"Bart",
"Bartholome",
"Barton",
"Baylee",
"Beatrice",
"Beau",
"Beaulah",
"Bell",
"Bella",
"Belle",
"Ben",
"Benedict",
"Benjamin",
"Bennett",
"Bennie",
"Benny",
"Benton",
"Berenice",
"Bernadette",
"Bernadine",
"Bernard",
"Bernardo",
"Berneice",
"Bernhard",
"Bernice",
"Bernie",
"Berniece",
"Bernita",
"Berry",
"Bert",
"Berta",
"Bertha",
"Bertram",
"Bertrand",
"Beryl",
"Bessie",
"Beth",
"Bethany",
"Bethel",
"Betsy",
"Bette",
"Bettie",
"Betty",
"Bettye",
"Beulah",
"Beverly",
"Bianka",
"Bill",
"Billie",
"Billy",
"Birdie",
"Blair",
"Blaise",
"Blake",
"Blanca",
"Blanche",
"Blaze",
"Bo",
"Bobbie",
"Bobby",
"Bonita",
"Bonnie",
"Boris",
"Boyd",
"Brad",
"Braden",
"Bradford",
"Bradley",
"Bradly",
"Brady",
"Braeden",
"Brain",
"Brandi",
"Brando",
"Brandon",
"Brandt",
"Brandy",
"Brandyn",
"Brannon",
"Branson",
"Brant",
"Braulio",
"Braxton",
"Brayan",
"Breana",
"Breanna",
"Breanne",
"Brenda",
"Brendan",
"Brenden",
"Brendon",
"Brenna",
"Brennan",
"Brennon",
"Brent",
"Bret",
"Brett",
"Bria",
"Brian",
"Briana",
"Brianne",
"Brice",
"Bridget",
"Bridgette",
"Bridie",
"Brielle",
"Brigitte",
"Brionna",
"Brisa",
"Britney",
"Brittany",
"Brock",
"Broderick",
"Brody",
"Brook",
"Brooke",
"Brooklyn",
"Brooks",
"Brown",
"Bruce",
"Bryana",
"Bryce",
"Brycen",
"Bryon",
"Buck",
"Bud",
"Buddy",
"Buford",
"Bulah",
"Burdette",
"Burley",
"Burnice",
"Buster",
"Cade",
"Caden",
"Caesar",
"Caitlyn",
"Cale",
"Caleb",
"Caleigh",
"Cali",
"Calista",
"Callie",
"Camden",
"Cameron",
"Camila",
"Camilla",
"Camille",
"Camren",
"Camron",
"Camryn",
"Camylle",
"Candace",
"Candelario",
"Candice",
"Candida",
"Candido",
"Cara",
"Carey",
"Carissa",
"Carlee",
"Carleton",
"Carley",
"Carli",
"Carlie",
"Carlo",
"Carlos",
"Carlotta",
"Carmel",
"Carmela",
"Carmella",
"Carmelo",
"Carmen",
"Carmine",
"Carol",
"Carolanne",
"Carole",
"Carolina",
"Caroline",
"Carolyn",
"Carolyne",
"Carrie",
"Carroll",
"Carson",
"Carter",
"Cary",
"Casandra",
"Casey",
"Casimer",
"Casimir",
"Casper",
"Cassandra",
"Cassandre",
"Cassidy",
"Cassie",
"Catalina",
"Caterina",
"Catharine",
"Catherine",
"Cathrine",
"Cathryn",
"Cathy",
"Cayla",
"Ceasar",
"Cecelia",
"Cecil",
"Cecile",
"Cecilia",
"Cedrick",
"Celestine",
"Celestino",
"Celia",
"Celine",
"Cesar",
"Chad",
"Chadd",
"Chadrick",
"Chaim",
"Chance",
"Chandler",
"Chanel",
"Chanelle",
"Charity",
"Charlene",
"Charles",
"Charley",
"Charlie",
"Charlotte",
"Chase",
"Chasity",
"Chauncey",
"Chaya",
"Chaz",
"Chelsea",
"Chelsey",
"Chelsie",
"Chesley",
"Chester",
"Chet",
"Cheyanne",
"Cheyenne",
"Chloe",
"Chris",
"Christ",
"Christa",
"Christelle",
"Christian",
"Christiana",
"Christina",
"Christine",
"Christop",
"Christophe",
"Christopher",
"Christy",
"Chyna",
"Ciara",
"Cicero",
"Cielo",
"Cierra",
"Cindy",
"Citlalli",
"Clair",
"Claire",
"Clara",
"Clarabelle",
"Clare",
"Clarissa",
"Clark",
"Claud",
"Claude",
"Claudia",
"Claudie",
"Claudine",
"Clay",
"Clemens",
"Clement",
"Clementina",
"Clementine",
"Clemmie",
"Cleo",
"Cleora",
"Cleta",
"Cletus",
"Cleve",
"Cleveland",
"Clifford",
"Clifton",
"Clint",
"Clinton",
"Clotilde",
"Clovis",
"Cloyd",
"Clyde",
"Coby",
"Cody",
"Colby",
"Cole",
"Coleman",
"Colin",
"Colleen",
"Collin",
"Colt",
"Colten",
"Colton",
"Columbus",
"Concepcion",
"Conner",
"Connie",
"Connor",
"Conor",
"Conrad",
"Constance",
"Constantin",
"Consuelo",
"Cooper",
"Cora",
"Coralie",
"Corbin",
"Cordelia",
"Cordell",
"Cordia",
"Cordie",
"Corene",
"Corine",
"Cornelius",
"Cornell",
"Corrine",
"Cortez",
"Cortney",
"Cory",
"Coty",
"Courtney",
"Coy",
"Craig",
"Crawford",
"Creola",
"Cristal",
"Cristian",
"Cristina",
"Cristobal",
"Cristopher",
"Cruz",
"Crystal",
"Crystel",
"Cullen",
"Curt",
"Curtis",
"Cydney",
"Cynthia",
"Cyril",
"Cyrus",
"Dagmar",
"Dahlia",
"Daija",
"Daisha",
"Daisy",
"Dakota",
"Dale",
"Dallas",
"Dallin",
"Dalton",
"Damaris",
"Dameon",
"Damian",
"Damien",
"Damion",
"Damon",
"Dan",
"Dana",
"Dandre",
"Dane",
"D'angelo",
"Dangelo",
"Danial",
"Daniela",
"Daniella",
"Danielle",
"Danika",
"Dannie",
"Danny",
"Dante",
"Danyka",
"Daphne",
"Daphnee",
"Daphney",
"Darby",
"Daren",
"Darian",
"Dariana",
"Darien",
"Dario",
"Darion",
"Darius",
"Darlene",
"Daron",
"Darrel",
"Darrell",
"Darren",
"Darrick",
"Darrin",
"Darrion",
"Darron",
"Darryl",
"Darwin",
"Daryl",
"Dashawn",
"Dasia",
"Dave",
"David",
"Davin",
"Davion",
"Davon",
"Davonte",
"Dawn",
"Dawson",
"Dax",
"Dayana",
"Dayna",
"Dayne",
"Dayton",
"Dean",
"Deangelo",
"Deanna",
"Deborah",
"Declan",
"Dedric",
"Dedrick",
"Dee",
"Deion",
"Deja",
"Dejah",
"Dejon",
"Dejuan",
"Delaney",
"Delbert",
"Delfina",
"Delia",
"Delilah",
"Dell",
"Della",
"Delmer",
"Delores",
"Delpha",
"Delphia",
"Delphine",
"Delta",
"Demarco",
"Demarcus",
"Demario",
"Demetris",
"Demetrius",
"Demond",
"Dena",
"Denis",
"Dennis",
"Deon",
"Deondre",
"Deontae",
"Deonte",
"Dereck",
"Derek",
"Derick",
"Deron",
"Derrick",
"Deshaun",
"Deshawn",
"Desiree",
"Desmond",
"Dessie",
"Destany",
"Destin",
"Destinee",
"Destiney",
"Destini",
"Destiny",
"Devan",
"Devante",
"Deven",
"Devin",
"Devon",
"Devonte",
"Devyn",
"Dewayne",
"Dewitt",
"Dexter",
"Diamond",
"Diana",
"Dianna",
"Diego",
"Dillan",
"Dillon",
"Dimitri",
"Dina",
"Dino",
"Dion",
"Dixie",
"Dock",
"Dolly",
"Dolores",
"Domenic",
"Domenica",
"Domenick",
"Domenico",
"Domingo",
"Dominic",
"Dominique",
"Don",
"Donald",
"Donato",
"Donavon",
"Donna",
"Donnell",
"Donnie",
"Donny",
"Dora",
"Dorcas",
"Dorian",
"Doris",
"Dorothea",
"Dorothy",
"Dorris",
"Dortha",
"Dorthy",
"Doug",
"Douglas",
"Dovie",
"Doyle",
"Drake",
"Drew",
"Duane",
"Dudley",
"Dulce",
"Duncan",
"Durward",
"Dustin",
"Dusty",
"Dwight",
"Dylan",
"Earl",
"Earlene",
"Earline",
"Earnest",
"Earnestine",
"Easter",
"Easton",
"Ebba",
"Ebony",
"Ed",
"Eda",
"Edd",
"Eddie",
"Eden",
"Edgar",
"Edgardo",
"Edison",
"Edmond",
"Edmund",
"Edna",
"Eduardo",
"Edward",
"Edwardo",
"Edwin",
"Edwina",
"Edyth",
"Edythe",
"Effie",
"Efrain",
"Efren",
"Eileen",
"Einar",
"Eino",
"Eladio",
"Elaina",
"Elbert",
"Elda",
"Eldon",
"Eldora",
"Eldred",
"Eldridge",
"Eleanora",
"Eleanore",
"Eleazar",
"Electa",
"Elena",
"Elenor",
"Elenora",
"Eleonore",
"Elfrieda",
"Eli",
"Elian",
"Eliane",
"Elias",
"Eliezer",
"Elijah",
"Elinor",
"Elinore",
"Elisa",
"Elisabeth",
"Elise",
"Eliseo",
"Elisha",
"Elissa",
"Eliza",
"Elizabeth",
"Ella",
"Ellen",
"Ellie",
"Elliot",
"Elliott",
"Ellis",
"Ellsworth",
"Elmer",
"Elmira",
"Elmo",
"Elmore",
"Elna",
"Elnora",
"Elody",
"Eloisa",
"Eloise",
"Elouise",
"Eloy",
"Elroy",
"Elsa",
"Else",
"Elsie",
"Elta",
"Elton",
"Elva",
"Elvera",
"Elvie",
"Elvis",
"Elwin",
"Elwyn",
"Elyse",
"Elyssa",
"Elza",
"Emanuel",
"Emelia",
"Emelie",
"Emely",
"Emerald",
"Emerson",
"Emery",
"Emie",
"Emil",
"Emile",
"Emilia",
"Emiliano",
"Emilie",
"Emilio",
"Emily",
"Emma",
"Emmalee",
"Emmanuel",
"Emmanuelle",
"Emmet",
"Emmett",
"Emmie",
"Emmitt",
"Emmy",
"Emory",
"Ena",
"Enid",
"Enoch",
"Enola",
"Enos",
"Enrico",
"Enrique",
"Ephraim",
"Era",
"Eriberto",
"Eric",
"Erica",
"Erich",
"Erick",
"Ericka",
"Erik",
"Erika",
"Erin",
"Erling",
"Erna",
"Ernest",
"Ernestina",
"Ernestine",
"Ernesto",
"Ernie",
"Ervin",
"Erwin",
"Eryn",
"Esmeralda",
"Esperanza",
"Esta",
"Esteban",
"Estefania",
"Estel",
"Estell",
"Estella",
"Estelle",
"Estevan",
"Esther",
"Estrella",
"Etha",
"Ethan",
"Ethel",
"Ethelyn",
"Ethyl",
"Ettie",
"Eudora",
"Eugene",
"Eugenia",
"Eula",
"Eulah",
"Eulalia",
"Euna",
"Eunice",
"Eusebio",
"Eva",
"Evalyn",
"Evan",
"Evangeline",
"Evans",
"Eve",
"Eveline",
"Evelyn",
"Everardo",
"Everett",
"Everette",
"Evert",
"Evie",
"Ewald",
"Ewell",
"Ezekiel",
"Ezequiel",
"Ezra",
"Fabian",
"Fabiola",
"Fae",
"Fannie",
"Fanny",
"Fatima",
"Faustino",
"Fausto",
"Favian",
"Fay",
"Faye",
"Federico",
"Felicia",
"Felicita",
"Felicity",
"Felipa",
"Felipe",
"Felix",
"Felton",
"Fermin",
"Fern",
"Fernando",
"Ferne",
"Fidel",
"Filiberto",
"Filomena",
"Finn",
"Fiona",
"Flavie",
"Flavio",
"Fleta",
"Fletcher",
"Flo",
"Florence",
"Florencio",
"Florian",
"Florida",
"Florine",
"Flossie",
"Floy",
"Floyd",
"Ford",
"Forest",
"Forrest",
"Foster",
"Frances",
"Francesca",
"Francesco",
"Francis",
"Francisca",
"Francisco",
"Franco",
"Frank",
"Frankie",
"Franz",
"Fred",
"Freda",
"Freddie",
"Freddy",
"Frederic",
"Frederick",
"Frederik",
"Frederique",
"Fredrick",
"Fredy",
"Freeda",
"Freeman",
"Freida",
"Frida",
"Frieda",
"Friedrich",
"Fritz",
"Furman",
"Gabe",
"Gabriel",
"Gabriella",
"Gabrielle",
"Gaetano",
"Gage",
"Gail",
"Gardner",
"Garett",
"Garfield",
"Garland",
"Garnet",
"Garnett",
"Garret",
"Garrett",
"Garrick",
"Garrison",
"Garry",
"Garth",
"Gaston",
"Gavin",
"Gay",
"Gayle",
"Gaylord",
"Gene",
"General",
"Genesis",
"Genevieve",
"Gennaro",
"Genoveva",
"Geo",
"Geoffrey",
"George",
"Georgette",
"Georgiana",
"Georgianna",
"Geovanni",
"Geovanny",
"Geovany",
"Gerald",
"Geraldine",
"Gerard",
"Gerardo",
"Gerda",
"Gerhard",
"Germaine",
"German",
"Gerry",
"Gerson",
"Gertrude",
"Gia",
"Gianni",
"Gideon",
"Gilbert",
"Gilberto",
"Gilda",
"Giles",
"Gillian",
"Gina",
"Gino",
"Giovani",
"Giovanna",
"Giovanni",
"Giovanny",
"Gisselle",
"Giuseppe",
"Gladyce",
"Gladys",
"Glen",
"Glenda",
"Glenna",
"Glennie",
"Gloria",
"Godfrey",
"Golda",
"Golden",
"Gonzalo",
"Gordon",
"Grace",
"Gracie",
"Graciela",
"Grady",
"Graham",
"Grant",
"Granville",
"Grayce",
"Grayson",
"Green",
"Greg",
"Gregg",
"Gregoria",
"Gregorio",
"Gregory",
"Greta",
"Gretchen",
"Greyson",
"Griffin",
"Grover",
"Guadalupe",
"Gudrun",
"Guido",
"Guillermo",
"Guiseppe",
"Gunnar",
"Gunner",
"Gus",
"Gussie",
"Gust",
"Gustave",
"Guy",
"Gwen",
"Gwendolyn",
"Hadley",
"Hailee",
"Hailey",
"Hailie",
"Hal",
"Haleigh",
"Haley",
"Halie",
"Halle",
"Hallie",
"Hank",
"Hanna",
"Hannah",
"Hans",
"Hardy",
"Harley",
"Harmon",
"Harmony",
"Harold",
"Harrison",
"Harry",
"Harvey",
"Haskell",
"Hassan",
"Hassie",
"Hattie",
"Haven",
"Hayden",
"Haylee",
"Hayley",
"Haylie",
"Hazel",
"Hazle",
"Heath",
"Heather",
"Heaven",
"Heber",
"Hector",
"Heidi",
"Helen",
"Helena",
"Helene",
"Helga",
"Hellen",
"Helmer",
"Heloise",
"Henderson",
"Henri",
"Henriette",
"Henry",
"Herbert",
"Herman",
"Hermann",
"Hermina",
"Herminia",
"Herminio",
"Hershel",
"Herta",
"Hertha",
"Hester",
"Hettie",
"Hilario",
"Hilbert",
"Hilda",
"Hildegard",
"Hillard",
"Hillary",
"Hilma",
"Hilton",
"Hipolito",
"Hiram",
"Hobart",
"Holden",
"Hollie",
"Hollis",
"Holly",
"Hope",
"Horace",
"Horacio",
"Hortense",
"Hosea",
"Houston",
"Howard",
"Howell",
"Hoyt",
"Hubert",
"Hudson",
"Hugh",
"Hulda",
"Humberto",
"Hunter",
"Hyman",
"Ian",
"Ibrahim",
"Icie",
"Ida",
"Idell",
"Idella",
"Ignacio",
"Ignatius",
"Ike",
"Ila",
"Ilene",
"Iliana",
"Ima",
"Imani",
"Imelda",
"Immanuel",
"Imogene",
"Ines",
"Irma",
"Irving",
"Irwin",
"Isaac",
"Isabel",
"Isabell",
"Isabella",
"Isabelle",
"Isac",
"Isadore",
"Isai",
"Isaiah",
"Isaias",
"Isidro",
"Ismael",
"Isobel",
"Isom",
"Israel",
"Issac",
"Itzel",
"Iva",
"Ivah",
"Ivory",
"Ivy",
"Izabella",
"Izaiah",
"Jabari",
"Jace",
"Jacey",
"Jacinthe",
"Jacinto",
"Jack",
"Jackeline",
"Jackie",
"Jacklyn",
"Jackson",
"Jacky",
"Jaclyn",
"Jacquelyn",
"Jacques",
"Jacynthe",
"Jada",
"Jade",
"Jaden",
"Jadon",
"Jadyn",
"Jaeden",
"Jaida",
"Jaiden",
"Jailyn",
"Jaime",
"Jairo",
"Jakayla",
"Jake",
"Jakob",
"Jaleel",
"Jalen",
"Jalon",
"Jalyn",
"Jamaal",
"Jamal",
"Jamar",
"Jamarcus",
"Jamel",
"Jameson",
"Jamey",
"Jamie",
"Jamil",
"Jamir",
"Jamison",
"Jammie",
"Jan",
"Jana",
"Janae",
"Jane",
"Janelle",
"Janessa",
"Janet",
"Janice",
"Janick",
"Janie",
"Janis",
"Janiya",
"Jannie",
"Jany",
"Jaquan",
"Jaquelin",
"Jaqueline",
"Jared",
"Jaren",
"Jarod",
"Jaron",
"Jarred",
"Jarrell",
"Jarret",
"Jarrett",
"Jarrod",
"Jarvis",
"Jasen",
"Jasmin",
"Jason",
"Jasper",
"Jaunita",
"Javier",
"Javon",
"Javonte",
"Jay",
"Jayce",
"Jaycee",
"Jayda",
"Jayde",
"Jayden",
"Jaydon",
"Jaylan",
"Jaylen",
"Jaylin",
"Jaylon",
"Jayme",
"Jayne",
"Jayson",
"Jazlyn",
"Jazmin",
"Jazmyn",
"Jazmyne",
"Jean",
"Jeanette",
"Jeanie",
"Jeanne",
"Jed",
"Jedediah",
"Jedidiah",
"Jeff",
"Jefferey",
"Jeffery",
"Jeffrey",
"Jeffry",
"Jena",
"Jenifer",
"Jennie",
"Jennifer",
"Jennings",
"Jennyfer",
"Jensen",
"Jerad",
"Jerald",
"Jeramie",
"Jeramy",
"Jerel",
"Jeremie",
"Jeremy",
"Jermain",
"Jermaine",
"Jermey",
"Jerod",
"Jerome",
"Jeromy",
"Jerrell",
"Jerrod",
"Jerrold",
"Jerry",
"Jess",
"Jesse",
"Jessica",
"Jessie",
"Jessika",
"Jessy",
"Jessyca",
"Jesus",
"Jett",
"Jettie",
"Jevon",
"Jewel",
"Jewell",
"Jillian",
"Jimmie",
"Jimmy",
"Jo",
"Joan",
"Joana",
"Joanie",
"Joanne",
"Joannie",
"Joanny",
"Joany",
"Joaquin",
"Jocelyn",
"Jodie",
"Jody",
"Joe",
"Joel",
"Joelle",
"Joesph",
"Joey",
"Johan",
"Johann",
"Johanna",
"Johathan",
"John",
"Johnathan",
"Johnathon",
"Johnnie",
"Johnny",
"Johnpaul",
"Johnson",
"Jolie",
"Jon",
"Jonas",
"Jonatan",
"Jonathan",
"Jonathon",
"Jordan",
"Jordane",
"Jordi",
"Jordon",
"Jordy",
"Jordyn",
"Jorge",
"Jose",
"Josefa",
"Josefina",
"Joseph",
"Josephine",
"Josh",
"Joshua",
"Joshuah",
"Josiah",
"Josiane",
"Josianne",
"Josie",
"Josue",
"Jovan",
"Jovani",
"Jovanny",
"Jovany",
"Joy",
"Joyce",
"Juana",
"Juanita",
"Judah",
"Judd",
"Jude",
"Judge",
"Judson",
"Judy",
"Jules",
"Julia",
"Julian",
"Juliana",
"Julianne",
"Julie",
"Julien",
"Juliet",
"Julio",
"Julius",
"June",
"Junior",
"Junius",
"Justen",
"Justice",
"Justina",
"Justine",
"Juston",
"Justus",
"Justyn",
"Juvenal",
"Juwan",
"Kacey",
"Kaci",
"Kacie",
"Kade",
"Kaden",
"Kadin",
"Kaela",
"Kaelyn",
"Kaia",
"Kailee",
"Kailey",
"Kailyn",
"Kaitlin",
"Kaitlyn",
"Kale",
"Kaleb",
"Kaleigh",
"Kaley",
"Kali",
"Kallie",
"Kameron",
"Kamille",
"Kamren",
"Kamron",
"Kamryn",
"Kane",
"Kara",
"Kareem",
"Karelle",
"Karen",
"Kari",
"Kariane",
"Karianne",
"Karina",
"Karine",
"Karl",
"Karlee",
"Karley",
"Karli",
"Karlie",
"Karolann",
"Karson",
"Kasandra",
"Kasey",
"Kassandra",
"Katarina",
"Katelin",
"Katelyn",
"Katelynn",
"Katharina",
"Katherine",
"Katheryn",
"Kathleen",
"Kathlyn",
"Kathryn",
"Kathryne",
"Katlyn",
"Katlynn",
"Katrina",
"Katrine",
"Kattie",
"Kavon",
"Kay",
"Kaya",
"Kaycee",
"Kayden",
"Kayla",
"Kaylah",
"Kaylee",
"Kayleigh",
"Kayley",
"Kayli",
"Kaylie",
"Kaylin",
"Keagan",
"Keanu",
"Keara",
"Keaton",
"Keegan",
"Keeley",
"Keely",
"Keenan",
"Keira",
"Keith",
"Kellen",
"Kelley",
"Kelli",
"Kellie",
"Kelly",
"Kelsi",
"Kelsie",
"Kelton",
"Kelvin",
"Ken",
"Kendall",
"Kendra",
"Kendrick",
"Kenna",
"Kennedi",
"Kennedy",
"Kenneth",
"Kennith",
"Kenny",
"Kenton",
"Kenya",
"Kenyatta",
"Kenyon",
"Keon",
"Keshaun",
"Keshawn",
"Keven",
"Kevin",
"Kevon",
"Keyon",
"Keyshawn",
"Khalid",
"Khalil",
"Kian",
"Kiana",
"Kianna",
"Kiara",
"Kiarra",
"Kiel",
"Kiera",
"Kieran",
"Kiley",
"Kim",
"Kimberly",
"King",
"Kip",
"Kira",
"Kirk",
"Kirsten",
"Kirstin",
"Kitty",
"Kobe",
"Koby",
"Kody",
"Kolby",
"Kole",
"Korbin",
"Korey",
"Kory",
"Kraig",
"Kris",
"Krista",
"Kristian",
"Kristin",
"Kristina",
"Kristofer",
"Kristoffer",
"Kristopher",
"Kristy",
"Krystal",
"Krystel",
"Krystina",
"Kurt",
"Kurtis",
"Kyla",
"Kyle",
"Kylee",
"Kyleigh",
"Kyler",
"Kylie",
"Kyra",
"Lacey",
"Lacy",
"Ladarius",
"Lafayette",
"Laila",
"Laisha",
"Lamar",
"Lambert",
"Lamont",
"Lance",
"Landen",
"Lane",
"Laney",
"Larissa",
"Laron",
"Larry",
"Larue",
"Laura",
"Laurel",
"Lauren",
"Laurence",
"Lauretta",
"Lauriane",
"Laurianne",
"Laurie",
"Laurine",
"Laury",
"Lauryn",
"Lavada",
"Lavern",
"Laverna",
"Laverne",
"Lavina",
"Lavinia",
"Lavon",
"Lavonne",
"Lawrence",
"Lawson",
"Layla",
"Layne",
"Lazaro",
"Lea",
"Leann",
"Leanna",
"Leanne",
"Leatha",
"Leda",
"Lee",
"Leif",
"Leila",
"Leilani",
"Lela",
"Lelah",
"Leland",
"Lelia",
"Lempi",
"Lemuel",
"Lenna",
"Lennie",
"Lenny",
"Lenora",
"Lenore",
"Leo",
"Leola",
"Leon",
"Leonard",
"Leonardo",
"Leone",
"Leonel",
"Leonie",
"Leonor",
"Leonora",
"Leopold",
"Leopoldo",
"Leora",
"Lera",
"Lesley",
"Leslie",
"Lesly",
"Lessie",
"Lester",
"Leta",
"Letha",
"Letitia",
"Levi",
"Lew",
"Lewis",
"Lexi",
"Lexie",
"Lexus",
"Lia",
"Liam",
"Liana",
"Libbie",
"Libby",
"Lila",
"Lilian",
"Liliana",
"Liliane",
"Lilla",
"Lillian",
"Lilliana",
"Lillie",
"Lilly",
"Lily",
"Lilyan",
"Lina",
"Lincoln",
"Linda",
"Lindsay",
"Lindsey",
"Linnea",
"Linnie",
"Linwood",
"Lionel",
"Lisa",
"Lisandro",
"Lisette",
"Litzy",
"Liza",
"Lizeth",
"Lizzie",
"Llewellyn",
"Lloyd",
"Logan",
"Lois",
"Lola",
"Lolita",
"Loma",
"Lon",
"London",
"Lonie",
"Lonnie",
"Lonny",
"Lonzo",
"Lora",
"Loraine",
"Loren",
"Lorena",
"Lorenz",
"Lorenza",
"Lorenzo",
"Lori",
"Lorine",
"Lorna",
"Lottie",
"Lou",
"Louie",
"Louisa",
"Lourdes",
"Louvenia",
"Lowell",
"Loy",
"Loyal",
"Loyce",
"Lucas",
"Luciano",
"Lucie",
"Lucienne",
"Lucile",
"Lucinda",
"Lucio",
"Lucious",
"Lucius",
"Lucy",
"Ludie",
"Ludwig",
"Lue",
"Luella",
"Luigi",
"Luis",
"Luisa",
"Lukas",
"Lula",
"Lulu",
"Luna",
"Lupe",
"Lura",
"Lurline",
"Luther",
"Luz",
"Lyda",
"Lydia",
"Lyla",
"Lynn",
"Lyric",
"Lysanne",
"Mabel",
"Mabelle",
"Mable",
"Mac",
"Macey",
"Maci",
"Macie",
"Mack",
"Mackenzie",
"Macy",
"Madaline",
"Madalyn",
"Maddison",
"Madeline",
"Madelyn",
"Madelynn",
"Madge",
"Madie",
"Madilyn",
"Madisen",
"Madison",
"Madisyn",
"Madonna",
"Madyson",
"Mae",
"Maegan",
"Maeve",
"Mafalda",
"Magali",
"Magdalen",
"Magdalena",
"Maggie",
"Magnolia",
"Magnus",
"Maia",
"Maida",
"Maiya",
"Major",
"Makayla",
"Makenna",
"Makenzie",
"Malachi",
"Malcolm",
"Malika",
"Malinda",
"Mallie",
"Mallory",
"Malvina",
"Mandy",
"Manley",
"Manuel",
"Manuela",
"Mara",
"Marc",
"Marcel",
"Marcelina",
"Marcelino",
"Marcella",
"Marcelle",
"Marcellus",
"Marcelo",
"Marcia",
"Marco",
"Marcos",
"Marcus",
"Margaret",
"Margarete",
"Margarett",
"Margaretta",
"Margarette",
"Margarita",
"Marge",
"Margie",
"Margot",
"Margret",
"Marguerite",
"Maria",
"Mariah",
"Mariam",
"Marian",
"Mariana",
"Mariane",
"Marianna",
"Marianne",
"Mariano",
"Maribel",
"Marie",
"Mariela",
"Marielle",
"Marietta",
"Marilie",
"Marilou",
"Marilyne",
"Marina",
"Mario",
"Marion",
"Marisa",
"Marisol",
"Maritza",
"Marjolaine",
"Marjorie",
"Marjory",
"Mark",
"Markus",
"Marlee",
"Marlen",
"Marlene",
"Marley",
"Marlin",
"Marlon",
"Marques",
"Marquis",
"Marquise",
"Marshall",
"Marta",
"Martin",
"Martina",
"Martine",
"Marty",
"Marvin",
"Mary",
"Maryam",
"Maryjane",
"Maryse",
"Mason",
"Mateo",
"Mathew",
"Mathias",
"Mathilde",
"Matilda",
"Matilde",
"Matt",
"Matteo",
"Mattie",
"Maud",
"Maude",
"Maudie",
"Maureen",
"Maurice",
"Mauricio",
"Maurine",
"Maverick",
"Mavis",
"Max",
"Maxie",
"Maxime",
"Maximilian",
"Maximillia",
"Maximillian",
"Maximo",
"Maximus",
"Maxine",
"Maxwell",
"May",
"Maya",
"Maybell",
"Maybelle",
"Maye",
"Maymie",
"Maynard",
"Mayra",
"Mazie",
"Mckayla",
"Mckenna",
"Mckenzie",
"Meagan",
"Meaghan",
"Meda",
"Megane",
"Meggie",
"Meghan",
"Mekhi",
"Melany",
"Melba",
"Melisa",
"Melissa",
"Mellie",
"Melody",
"Melvin",
"Melvina",
"Melyna",
"Melyssa",
"Mercedes",
"Meredith",
"Merl",
"Merle",
"Merlin",
"Merritt",
"Mertie",
"Mervin",
"Meta",
"Mia",
"Micaela",
"Micah",
"Michael",
"Michaela",
"Michale",
"Micheal",
"Michel",
"Michele",
"Michelle",
"Miguel",
"Mikayla",
"Mike",
"Mikel",
"Milan",
"Miles",
"Milford",
"Miller",
"Millie",
"Milo",
"Milton",
"Mina",
"Minerva",
"Minnie",
"Miracle",
"Mireille",
"Mireya",
"Misael",
"Missouri",
"Misty",
"Mitchel",
"Mitchell",
"Mittie",
"Modesta",
"Modesto",
"Mohamed",
"Mohammad",
"Mohammed",
"Moises",
"Mollie",
"Molly",
"Mona",
"Monica",
"Monique",
"Monroe",
"Monserrat",
"Monserrate",
"Montana",
"Monte",
"Monty",
"Morgan",
"Moriah",
"Morris",
"Mortimer",
"Morton",
"Mose",
"Moses",
"Moshe",
"Mossie",
"Mozell",
"Mozelle",
"Muhammad",
"Muriel",
"Murl",
"Murphy",
"Murray",
"Mustafa",
"Mya",
"Myah",
"Mylene",
"Myles",
"Myra",
"Myriam",
"Myrl",
"Myrna",
"Myron",
"Myrtice",
"Myrtie",
"Myrtis",
"Myrtle",
"Nadia",
"Nakia",
"Name",
"Nannie",
"Naomi",
"Naomie",
"Napoleon",
"Narciso",
"Nash",
"Nasir",
"Nat",
"Natalia",
"Natalie",
"Natasha",
"Nathan",
"Nathanael",
"Nathanial",
"Nathaniel",
"Nathen",
"Nayeli",
"Neal",
"Ned",
"Nedra",
"Neha",
"Neil",
"Nelda",
"Nella",
"Nelle",
"Nellie",
"Nels",
"Nelson",
"Neoma",
"Nestor",
"Nettie",
"Neva",
"Newell",
"Newton",
"Nia",
"Nicholas",
"Nicholaus",
"Nichole",
"Nick",
"Nicklaus",
"Nickolas",
"Nico",
"Nicola",
"Nicolas",
"Nicole",
"Nicolette",
"Nigel",
"Nikita",
"Nikki",
"Nikko",
"Niko",
"Nikolas",
"Nils",
"Nina",
"Noah",
"Noble",
"Noe",
"Noel",
"Noelia",
"Noemi",
"Noemie",
"Noemy",
"Nola",
"Nolan",
"Nona",
"Nora",
"Norbert",
"Norberto",
"Norene",
"Norma",
"Norris",
"Norval",
"Norwood",
"Nova",
"Novella",
"Nya",
"Nyah",
"Nyasia",
"Obie",
"Oceane",
"Ocie",
"Octavia",
"Oda",
"Odell",
"Odessa",
"Odie",
"Ofelia",
"Okey",
"Ola",
"Olaf",
"Ole",
"Olen",
"Oleta",
"Olga",
"Olin",
"Oliver",
"Ollie",
"Oma",
"Omari",
"Omer",
"Ona",
"Onie",
"Opal",
"Ophelia",
"Ora",
"Oral",
"Oran",
"Oren",
"Orie",
"Orin",
"Orion",
"Orland",
"Orlando",
"Orlo",
"Orpha",
"Orrin",
"Orval",
"Orville",
"Osbaldo",
"Osborne",
"Oscar",
"Osvaldo",
"Oswald",
"Oswaldo",
"Otha",
"Otho",
"Otilia",
"Otis",
"Ottilie",
"Ottis",
"Otto",
"Ova",
"Owen",
"Ozella",
"Pablo",
"Paige",
"Palma",
"Pamela",
"Pansy",
"Paolo",
"Paris",
"Parker",
"Pascale",
"Pasquale",
"Pat",
"Patience",
"Patricia",
"Patrick",
"Patsy",
"Pattie",
"Paul",
"Paula",
"Pauline",
"Paxton",
"Payton",
"Pearl",
"Pearlie",
"Pearline",
"Pedro",
"Peggie",
"Penelope",
"Percival",
"Percy",
"Perry",
"Pete",
"Peter",
"Petra",
"Peyton",
"Philip",
"Phoebe",
"Phyllis",
"Pierce",
"Pierre",
"Pietro",
"Pink",
"Pinkie",
"Piper",
"Polly",
"Porter",
"Precious",
"Presley",
"Preston",
"Price",
"Prince",
"Princess",
"Priscilla",
"Providenci",
"Prudence",
"Queen",
"Queenie",
"Quentin",
"Quincy",
"Quinn",
"Quinten",
"Quinton",
"Rachael",
"Rachel",
"Rachelle",
"Rae",
"Raegan",
"Rafael",
"Rafaela",
"Raheem",
"Rahsaan",
"Rahul",
"Raina",
"Raleigh",
"Ralph",
"Ramiro",
"Ramon",
"Ramona",
"Randal",
"Randall",
"Randi",
"Randy",
"Ransom",
"Raoul",
"Raphael",
"Raphaelle",
"Raquel",
"Rashad",
"Rashawn",
"Rasheed",
"Raul",
"Raven",
"Ray",
"Raymond",
"Raymundo",
"Reagan",
"Reanna",
"Reba",
"Rebeca",
"Rebecca",
"Rebeka",
"Rebekah",
"Reece",
"Reed",
"Reese",
"Regan",
"Reggie",
"Reginald",
"Reid",
"Reilly",
"Reina",
"Reinhold",
"Remington",
"Rene",
"Renee",
"Ressie",
"Reta",
"Retha",
"Retta",
"Reuben",
"Reva",
"Rex",
"Rey",
"Reyes",
"Reymundo",
"Reyna",
"Reynold",
"Rhea",
"Rhett",
"Rhianna",
"Rhiannon",
"Rhoda",
"Ricardo",
"Richard",
"Richie",
"Richmond",
"Rick",
"Rickey",
"Rickie",
"Ricky",
"Rico",
"Rigoberto",
"Riley",
"Rita",
"River",
"Robb",
"Robbie",
"Robert",
"Roberta",
"Roberto",
"Robin",
"Robyn",
"Rocio",
"Rocky",
"Rod",
"Roderick",
"Rodger",
"Rodolfo",
"Rodrick",
"Rodrigo",
"Roel",
"Rogelio",
"Roger",
"Rogers",
"Rolando",
"Rollin",
"Roma",
"Romaine",
"Roman",
"Ron",
"Ronaldo",
"Ronny",
"Roosevelt",
"Rory",
"Rosa",
"Rosalee",
"Rosalia",
"Rosalind",
"Rosalinda",
"Rosalyn",
"Rosamond",
"Rosanna",
"Rosario",
"Roscoe",
"Rose",
"Rosella",
"Roselyn",
"Rosemarie",
"Rosemary",
"Rosendo",
"Rosetta",
"Rosie",
"Rosina",
"Roslyn",
"Ross",
"Rossie",
"Rowan",
"Rowena",
"Rowland",
"Roxane",
"Roxanne",
"Roy",
"Royal",
"Royce",
"Rozella",
"Ruben",
"Rubie",
"Ruby",
"Rubye",
"Rudolph",
"Rudy",
"Rupert",
"Russ",
"Russel",
"Russell",
"Rusty",
"Ruth",
"Ruthe",
"Ruthie",
"Ryan",
"Ryann",
"Ryder",
"Rylan",
"Rylee",
"Ryleigh",
"Ryley",
"Sabina",
"Sabrina",
"Sabryna",
"Sadie",
"Sadye",
"Sage",
"Saige",
"Sallie",
"Sally",
"Salma",
"Salvador",
"Salvatore",
"Sam",
"Samanta",
"Samantha",
"Samara",
"Samir",
"Sammie",
"Sammy",
"Samson",
"Sandra",
"Sandrine",
"Sandy",
"Sanford",
"Santa",
"Santiago",
"Santina",
"Santino",
"Santos",
"Sarah",
"Sarai",
"Sarina",
"Sasha",
"Saul",
"Savanah",
"Savanna",
"Savannah",
"Savion",
"Scarlett",
"Schuyler",
"Scot",
"Scottie",
"Scotty",
"Seamus",
"Sean",
"Sebastian",
"Sedrick",
"Selena",
"Selina",
"Selmer",
"Serena",
"Serenity",
"Seth",
"Shad",
"Shaina",
"Shakira",
"Shana",
"Shane",
"Shanel",
"Shanelle",
"Shania",
"Shanie",
"Shaniya",
"Shanna",
"Shannon",
"Shanny",
"Shanon",
"Shany",
"Sharon",
"Shaun",
"Shawn",
"Shawna",
"Shaylee",
"Shayna",
"Shayne",
"Shea",
"Sheila",
"Sheldon",
"Shemar",
"Sheridan",
"Sherman",
"Sherwood",
"Shirley",
"Shyann",
"Shyanne",
"Sibyl",
"Sid",
"Sidney",
"Sienna",
"Sierra",
"Sigmund",
"Sigrid",
"Sigurd",
"Silas",
"Sim",
"Simeon",
"Simone",
"Sincere",
"Sister",
"Skye",
"Skyla",
"Skylar",
"Sofia",
"Soledad",
"Solon",
"Sonia",
"Sonny",
"Sonya",
"Sophia",
"Sophie",
"Spencer",
"Stacey",
"Stacy",
"Stan",
"Stanford",
"Stanley",
"Stanton",
"Stefan",
"Stefanie",
"Stella",
"Stephan",
"Stephania",
"Stephanie",
"Stephany",
"Stephen",
"Stephon",
"Sterling",
"Steve",
"Stevie",
"Stewart",
"Stone",
"Stuart",
"Summer",
"Sunny",
"Susan",
"Susana",
"Susanna",
"Susie",
"Suzanne",
"Sven",
"Syble",
"Sydnee",
"Sydney",
"Sydni",
"Sydnie",
"Sylvan",
"Sylvester",
"Sylvia",
"Tabitha",
"Tad",
"Talia",
"Talon",
"Tamara",
"Tamia",
"Tania",
"Tanner",
"Tanya",
"Tara",
"Taryn",
"Tate",
"Tatum",
"Tatyana",
"Taurean",
"Tavares",
"Taya",
"Taylor",
"Teagan",
"Ted",
"Telly",
"Terence",
"Teresa",
"Terrance",
"Terrell",
"Terrence",
"Terrill",
"Terry",
"Tess",
"Tessie",
"Tevin",
"Thad",
"Thaddeus",
"Thalia",
"Thea",
"Thelma",
"Theo",
"Theodora",
"Theodore",
"Theresa",
"Therese",
"Theresia",
"Theron",
"Thomas",
"Thora",
"Thurman",
"Tia",
"Tiana",
"Tianna",
"Tiara",
"Tierra",
"Tiffany",
"Tillman",
"Timmothy",
"Timmy",
"Timothy",
"Tina",
"Tito",
"Titus",
"Tobin",
"Toby",
"Tod",
"Tom",
"Tomas",
"Tomasa",
"Tommie",
"Toney",
"Toni",
"Tony",
"Torey",
"Torrance",
"Torrey",
"Toy",
"Trace",
"Tracey",
"Tracy",
"Travis",
"Travon",
"Tre",
"Tremaine",
"Tremayne",
"Trent",
"Trenton",
"Tressa",
"Tressie",
"Treva",
"Trever",
"Trevion",
"Trevor",
"Trey",
"Trinity",
"Trisha",
"Tristian",
"Tristin",
"Triston",
"Troy",
"Trudie",
"Trycia",
"Trystan",
"Turner",
"Twila",
"Tyler",
"Tyra",
"Tyree",
"Tyreek",
"Tyrel",
"Tyrell",
"Tyrese",
"Tyrique",
"Tyshawn",
"Tyson",
"Ubaldo",
"Ulices",
"Ulises",
"Una",
"Unique",
"Urban",
"Uriah",
"Uriel",
"Ursula",
"Vada",
"Valentin",
"Valentina",
"Valentine",
"Valerie",
"Vallie",
"Van",
"Vance",
"Vanessa",
"Vaughn",
"Veda",
"Velda",
"Vella",
"Velma",
"Velva",
"Vena",
"Verda",
"Verdie",
"Vergie",
"Verla",
"Verlie",
"Vern",
"Verna",
"Verner",
"Vernice",
"Vernie",
"Vernon",
"Verona",
"Veronica",
"Vesta",
"Vicenta",
"Vicente",
"Vickie",
"Vicky",
"Victor",
"Victoria",
"Vida",
"Vidal",
"Vilma",
"Vince",
"Vincent",
"Vincenza",
"Vincenzo",
"Vinnie",
"Viola",
"Violet",
"Violette",
"Virgie",
"Virgil",
"Virginia",
"Virginie",
"Vita",
"Vito",
"Viva",
"Vivian",
"Viviane",
"Vivianne",
"Vivien",
"Vivienne",
"Vladimir",
"Wade",
"Waino",
"Waldo",
"Walker",
"Wallace",
"Walter",
"Walton",
"Wanda",
"Ward",
"Warren",
"Watson",
"Wava",
"Waylon",
"Wayne",
"Webster",
"Weldon",
"Wellington",
"Wendell",
"Wendy",
"Werner",
"Westley",
"Weston",
"Whitney",
"Wilber",
"Wilbert",
"Wilburn",
"Wiley",
"Wilford",
"Wilfred",
"Wilfredo",
"Wilfrid",
"Wilhelm",
"Wilhelmine",
"Will",
"Willa",
"Willard",
"William",
"Willie",
"Willis",
"Willow",
"Willy",
"Wilma",
"Wilmer",
"Wilson",
"Wilton",
"Winfield",
"Winifred",
"Winnifred",
"Winona",
"Winston",
"Woodrow",
"Wyatt",
"Wyman",
"Xander",
"Xavier",
"Xzavier",
"Yadira",
"Yasmeen",
"Yasmin",
"Yasmine",
"Yazmin",
"Yesenia",
"Yessenia",
"Yolanda",
"Yoshiko",
"Yvette",
"Yvonne",
"Zachariah",
"Zachary",
"Zachery",
"Zack",
"Zackary",
"Zackery",
"Zakary",
"Zander",
"Zane",
"Zaria",
"Zechariah",
"Zelda",
"Zella",
"Zelma",
"Zena",
"Zetta",
"Zion",
"Zita",
"Zoe",
"Zoey",
"Zoie",
"Zoila",
"Zola",
"Zora",
"Zula"
])
@doc """
Returns a random last name
## Examples
iex> Faker.Name.En.last_name()
"Blick"
iex> Faker.Name.En.last_name()
"Hayes"
iex> Faker.Name.En.last_name()
"Schumm"
iex> Faker.Name.En.last_name()
"Rolfson"
"""
@spec last_name() :: String.t()
sampler(:last_name, [
"Abbott",
"Abernathy",
"Abshire",
"Adams",
"Altenwerth",
"Anderson",
"Ankunding",
"Armstrong",
"Auer",
"Aufderhar",
"Bahringer",
"Bailey",
"Balistreri",
"Barrows",
"Bartell",
"Bartoletti",
"Barton",
"Bashirian",
"Batz",
"Bauch",
"Baumbach",
"Bayer",
"Beahan",
"Beatty",
"Bechtelar",
"Becker",
"Bednar",
"Beer",
"Beier",
"Berge",
"Bergnaum",
"Bergstrom",
"Bernhard",
"Bernier",
"Bins",
"Blanda",
"Blick",
"Block",
"Bode",
"Boehm",
"Bogan",
"Bogisich",
"Borer",
"Bosco",
"Botsford",
"Boyer",
"Boyle",
"Bradtke",
"Brakus",
"Braun",
"Breitenberg",
"Brekke",
"Brown",
"Bruen",
"Buckridge",
"Carroll",
"Carter",
"Cartwright",
"Casper",
"Cassin",
"Champlin",
"Christiansen",
"Cole",
"Collier",
"Collins",
"Conn",
"Connelly",
"Conroy",
"Considine",
"Corkery",
"Cormier",
"Corwin",
"Cremin",
"Crist",
"Crona",
"Cronin",
"Crooks",
"Cruickshank",
"Cummerata",
"Cummings",
"Dach",
"D'Amore",
"Daniel",
"Dare",
"Daugherty",
"Davis",
"Deckow",
"Denesik",
"Dibbert",
"Dickens",
"Dicki",
"Dickinson",
"Dietrich",
"Donnelly",
"Dooley",
"Douglas",
"Doyle",
"DuBuque",
"Durgan",
"Ebert",
"Effertz",
"Eichmann",
"Emard",
"Emmerich",
"Erdman",
"Ernser",
"Fadel",
"Fahey",
"Farrell",
"Fay",
"Feeney",
"Feest",
"Feil",
"Ferry",
"Fisher",
"Flatley",
"Frami",
"Franecki",
"Friesen",
"Fritsch",
"Funk",
"Gaylord",
"Gerhold",
"Gerlach",
"Gibson",
"Gislason",
"Gleason",
"Gleichner",
"Glover",
"Goldner",
"Goodwin",
"Gorczany",
"Gottlieb",
"Goyette",
"Grady",
"Graham",
"Grant",
"Green",
"Greenfelder",
"Greenholt",
"Grimes",
"Gulgowski",
"Gusikowski",
"Gutkowski",
"Gutmann",
"Haag",
"Hackett",
"Hagenes",
"Hahn",
"Haley",
"Halvorson",
"Hamill",
"Hammes",
"Hand",
"Hane",
"Hansen",
"Harber",
"Harris",
"Hartmann",
"Harvey",
"Hauck",
"Hayes",
"Heaney",
"Heathcote",
"Hegmann",
"Heidenreich",
"Heller",
"Herman",
"Hermann",
"Hermiston",
"Herzog",
"Hessel",
"Hettinger",
"Hickle",
"Hilll",
"Hills",
"Hilpert",
"Hintz",
"Hirthe",
"Hodkiewicz",
"Hoeger",
"Homenick",
"Hoppe",
"Howe",
"Howell",
"Hudson",
"Huel",
"Huels",
"Hyatt",
"Jacobi",
"Jacobs",
"Jacobson",
"Jakubowski",
"Jaskolski",
"Jast",
"Jenkins",
"Jerde",
"Johns",
"Johnson",
"Johnston",
"Jones",
"Kassulke",
"Kautzer",
"Keebler",
"Keeling",
"Kemmer",
"Kerluke",
"Kertzmann",
"Kessler",
"Kiehn",
"Kihn",
"Kilback",
"King",
"Kirlin",
"Klein",
"Kling",
"Klocko",
"Koch",
"Koelpin",
"Koepp",
"Kohler",
"Konopelski",
"Koss",
"Kovacek",
"Kozey",
"Krajcik",
"Kreiger",
"Kris",
"Kshlerin",
"Kub",
"Kuhic",
"Kuhlman",
"Kuhn",
"Kulas",
"Kunde",
"Kunze",
"Kuphal",
"Kutch",
"Kuvalis",
"Labadie",
"Lakin",
"Lang",
"Langosh",
"Langworth",
"Larkin",
"Larson",
"Leannon",
"Lebsack",
"Ledner",
"Leffler",
"Legros",
"Lehner",
"Lemke",
"Lesch",
"Leuschke",
"Lind",
"Lindgren",
"Littel",
"Little",
"Lockman",
"Lowe",
"Lubowitz",
"Lueilwitz",
"Luettgen",
"Lynch",
"Macejkovic",
"MacGyver",
"Maggio",
"Mann",
"Mante",
"Marks",
"Marquardt",
"Marvin",
"Mayer",
"Mayert",
"McClure",
"McCullough",
"McDermott",
"McGlynn",
"McKenzie",
"McLaughlin",
"Medhurst",
"Mertz",
"Metz",
"Miller",
"Mills",
"Mitchell",
"Moen",
"Mohr",
"Monahan",
"Moore",
"Morar",
"Morissette",
"Mosciski",
"Mraz",
"Mueller",
"Muller",
"Murazik",
"Murphy",
"Murray",
"Nader",
"Nicolas",
"Nienow",
"Nikolaus",
"Nitzsche",
"Nolan",
"Oberbrunner",
"O'Connell",
"O'Conner",
"O'Hara",
"O'Keefe",
"O'Kon",
"Okuneva",
"Olson",
"Ondricka",
"O'Reilly",
"Orn",
"Ortiz",
"Osinski",
"Pacocha",
"Padberg",
"Pagac",
"Parisian",
"Parker",
"Paucek",
"Pfannerstill",
"Pfeffer",
"Pollich",
"Pouros",
"Powlowski",
"Predovic",
"Price",
"Prohaska",
"Prosacco",
"Purdy",
"Quigley",
"Quitzon",
"Rath",
"Ratke",
"Rau",
"Raynor",
"Reichel",
"Reichert",
"Reilly",
"Reinger",
"Rempel",
"Renner",
"Reynolds",
"Rice",
"Rippin",
"Ritchie",
"Robel",
"Roberts",
"Rodriguez",
"Rogahn",
"Rohan",
"Rolfson",
"Romaguera",
"Roob",
"Rosenbaum",
"Rowe",
"Ruecker",
"Runolfsdottir",
"Runolfsson",
"Runte",
"Russel",
"Rutherford",
"Ryan",
"Sanford",
"Satterfield",
"Sauer",
"Sawayn",
"Schaden",
"Schaefer",
"Schamberger",
"Schiller",
"Schimmel",
"Schinner",
"Schmeler",
"Schmidt",
"Schmitt",
"Schneider",
"Schoen",
"Schowalter",
"Schroeder",
"Schulist",
"Schultz",
"Schumm",
"Schuppe",
"Schuster",
"Senger",
"Shanahan",
"Shields",
"Simonis",
"Sipes",
"Skiles",
"Smith",
"Smitham",
"Spencer",
"Spinka",
"Sporer",
"Stamm",
"Stanton",
"Stark",
"Stehr",
"Steuber",
"Stiedemann",
"Stokes",
"Stoltenberg",
"Stracke",
"Streich",
"Stroman",
"Strosin",
"Swaniawski",
"Swift",
"Terry",
"Thiel",
"Thompson",
"Tillman",
"Torp",
"Torphy",
"Towne",
"Toy",
"Trantow",
"Tremblay",
"Treutel",
"Tromp",
"Turcotte",
"Turner",
"Ullrich",
"Upton",
"Vandervort",
"Veum",
"Volkman",
"Von",
"VonRueden",
"Waelchi",
"Walker",
"Walsh",
"Walter",
"Ward",
"Waters",
"Watsica",
"Weber",
"Wehner",
"Weimann",
"Weissnat",
"Welch",
"West",
"White",
"Wiegand",
"Wilderman",
"Wilkinson",
"Will",
"Williamson",
"Willms",
"Windler",
"Wintheiser",
"Wisoky",
"Wisozk",
"Witting",
"Wiza",
"Wolf",
"Wolff",
"Wuckert",
"Wunsch",
"Wyman",
"Yost",
"Yundt",
"Zboncak",
"Zemlak",
"Ziemann",
"Zieme",
"Zulauf"
])
@doc """
Returns a random prefix
## Examples
iex> Faker.Name.En.prefix()
"Mr."
iex> Faker.Name.En.prefix()
"Mrs."
iex> Faker.Name.En.prefix()
"Mr."
iex> Faker.Name.En.prefix()
"Dr."
"""
@spec prefix() :: String.t()
sampler(:prefix, [
"Mr.",
"Mrs.",
"Ms.",
"Miss",
"Dr."
])
@doc """
Returns a random suffix
## Examples
iex> Faker.Name.En.suffix()
"II"
iex> Faker.Name.En.suffix()
"V"
iex> Faker.Name.En.suffix()
"V"
iex> Faker.Name.En.suffix()
"V"
"""
@spec suffix() :: String.t()
sampler(:suffix, [
"Jr.",
"Sr.",
"I",
"II",
"III",
"IV",
"V",
"MD",
"DDS",
"PhD",
"DVM"
])
@doc """
Returns a random complete job title
## Examples
iex> Faker.Name.En.title()
"Dynamic Identity Administrator"
iex> Faker.Name.En.title()
"Product Communications Technician"
iex> Faker.Name.En.title()
"Legacy Accountability Architect"
iex> Faker.Name.En.title()
"Customer Data Representative"
"""
@spec title() :: String.t()
def title do
"#{title_descriptor()} #{title_level()} #{title_job()}"
end
@doc """
Returns a random job title descriptor
## Examples
iex> Faker.Name.En.title_descriptor()
"Dynamic"
iex> Faker.Name.En.title_descriptor()
"Forward"
iex> Faker.Name.En.title_descriptor()
"Forward"
iex> Faker.Name.En.title_descriptor()
"Product"
"""
@spec title_descriptor() :: String.t()
sampler(:title_descriptor, [
"Lead",
"Senior",
"Direct",
"Corporate",
"Dynamic",
"Future",
"Product",
"National",
"Regional",
"District",
"Central",
"Global",
"Customer",
"Investor",
"Dynamic",
"International",
"Legacy",
"Forward",
"Internal",
"Human",
"Chief",
"Principal"
])
@doc """
Returns a random job title name
## Examples
iex> Faker.Name.En.title_job()
"Administrator"
iex> Faker.Name.En.title_job()
"Associate"
iex> Faker.Name.En.title_job()
"Administrator"
iex> Faker.Name.En.title_job()
"Officer"
"""
@spec title_job() :: String.t()
sampler(:title_job, [
"Supervisor",
"Associate",
"Executive",
"Liason",
"Officer",
"Manager",
"Engineer",
"Specialist",
"Director",
"Coordinator",
"Administrator",
"Architect",
"Analyst",
"Designer",
"Planner",
"Orchestrator",
"Technician",
"Developer",
"Producer",
"Consultant",
"Assistant",
"Facilitator",
"Agent",
"Representative",
"Strategist"
])
@doc """
Returns a random job title level
## Examples
iex> Faker.Name.En.title_level()
"Metrics"
iex> Faker.Name.En.title_level()
"Identity"
iex> Faker.Name.En.title_level()
"Assurance"
iex> Faker.Name.En.title_level()
"Intranet"
"""
@spec title_level() :: String.t()
sampler(:title_level, [
"Solutions",
"Program",
"Brand",
"Security",
"Research",
"Marketing",
"Directives",
"Implementation",
"Integration",
"Functionality",
"Response",
"Paradigm",
"Tactics",
"Identity",
"Markets",
"Group",
"Division",
"Applications",
"Optimization",
"Operations",
"Infrastructure",
"Intranet",
"Communications",
"Web",
"Branding",
"Quality",
"Assurance",
"Mobility",
"Accounts",
"Data",
"Creative",
"Configuration",
"Accountability",
"Interactions",
"Factors",
"Usability",
"Metrics"
])
end
lib/faker/name/en.ex
defmodule AdventOfCode.Y2020.Day21 do
def run() do
parsed =
AdventOfCode.Helpers.Data.read_from_file_no_split("2020/day21.txt")
|> parse()
{solve1(parsed), solve2(parsed)}
end
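# Part 1: count how many times the allergen-free ingredients appear across all foods.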
def solve1(parsed) do
safe_ingredients = allergen_free(parsed)
parsed
|> Stream.flat_map(fn {_, ingredients} -> ingredients end)
|> Stream.filter(&MapSet.member?(safe_ingredients, &1))
|> Enum.count()
end
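# Part 2: the canonical dangerous ingredient list, sorted by allergen name and joined with commas.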
def solve2(parsed) do
parsed
|> allergen_to_possible_ingredients()
|> allergens_to_certain_ingredient()
|> Enum.sort(fn a, b -> elem(a, 0) < elem(b, 0) end)
|> Enum.map(&elem(&1, 1))
|> Enum.join(",")
end
def allergen_free(parsed) do
all_ingredients =
parsed |> Enum.reduce(MapSet.new(), fn {_, i}, acc -> MapSet.union(i, acc) end)
parsed
|> allergen_to_possible_ingredients()
|> Enum.reduce(all_ingredients, fn {_, ingredients}, acc ->
MapSet.difference(acc, ingredients)
end)
end
def dish_to_allergen_map({allergens, ingredients}) do
allergens
|> Enum.map(fn allergen -> {allergen, ingredients} end)
|> Map.new()
end
def allergen_to_possible_ingredients(lines) do
lines
|> Enum.map(&dish_to_allergen_map/1)
|> Enum.reduce(fn map, acc ->
Map.merge(map, acc, fn _, v1, v2 -> MapSet.intersection(v1, v2) end)
end)
end
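# Repeatedly extracts allergens whose candidate set has narrowed to a single
# ingredient, removing those ingredients from the remaining candidate sets,
# until every allergen maps to exactly one ingredient.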
def allergens_to_certain_ingredient(unmapped) do
{[], Enum.to_list(unmapped)}
|> Stream.iterate(&extract_allergen_matches/1)
|> Stream.take_while(&more_allergens_to_process?/1)
|> Stream.drop(1)
|> Stream.flat_map(fn {matches, _} ->
matches |> Enum.map(fn {k, v} -> {k, Enum.take(v, 1) |> hd} end)
end)
|> Map.new()
end
def more_allergens_to_process?(progress) do
progress
|> Tuple.to_list()
|> Enum.all?(&Enum.empty?/1)
|> Kernel.not()
end
def extract_allergen_matches({last_matches, unprocessed}) do
matches =
last_matches |> Enum.reduce(MapSet.new(), fn {_, v}, acc -> MapSet.union(acc, v) end)
unprocessed
|> Enum.map(fn {key, ingredients} -> {key, MapSet.difference(ingredients, matches)} end)
|> Enum.split_with(fn {_, ingredients} -> MapSet.size(ingredients) == 1 end)
end
def parse(data) do
data
|> String.split("\n", trim: true)
|> Enum.map(&parse_line/1)
end
def parse_line(line) do
line
|> String.replace(~r/[\(\),]/, "")
|> String.split("contains", trim: true)
|> Enum.map(&String.split/1)
|> (fn [ingredients, allergens] -> {allergens, MapSet.new(ingredients)} end).()
end
end
lib/2020/day21.ex
defmodule RayTracer.Tasks.Chapter10 do
@moduledoc """
This module tests the camera from Chapter 10
"""
alias RayTracer.RTuple
alias RayTracer.Sphere
alias RayTracer.Plane
alias RayTracer.Canvas
alias RayTracer.Material
alias RayTracer.Color
alias RayTracer.Light
alias RayTracer.World
alias RayTracer.Camera
alias RayTracer.Matrix
alias RayTracer.StripePattern
alias RayTracer.GradientPattern
alias RayTracer.RingPattern
alias RayTracer.BlendedPattern
import RTuple, only: [point: 3, vector: 3]
import Light, only: [point_light: 2]
import RayTracer.Transformations
@doc """
Generates a file that tests rendering a world
"""
@spec execute :: :ok
def execute(w \\ 100, h \\ 50) do
# RayTracer.Tasks.Chapter10.execute
world = build_world()
camera = build_camera(w, h)
camera
|> Camera.render(world)
|> Canvas.export_to_ppm_file
:ok
end
defp build_world do
objects = [
back_wall(), floor(), left_sphere(), middle_sphere(), right_sphere()
]
light = point_light(point(-10, 10, -10), Color.new(1, 1, 1))
World.new(objects, light)
end
defp build_camera(w, h) do
transform = view_transform(point(0, 1.5, -5), point(0, 1, 0), vector(0, 1, 0))
Camera.new(w, h, :math.pi / 3, transform)
end
defp back_wall do
transform =
translation(0, 0, 2.5)
|> Matrix.mult(rotation_x(:math.pi / 2))
a = StripePattern.new(Color.black, Color.white)
b = StripePattern.new(Color.new_from_rgb_255(255, 0, 0), Color.white, rotation_y(:math.pi / 2))
pattern = %BlendedPattern{a: a, b: b, transform: rotation_z(:math.pi / 4)}
material = %Material{specular: 0, pattern: pattern}
%Plane{material: material, transform: transform}
end
defp floor do
pattern = StripePattern.new(Color.black, Color.white)
material = %Material{specular: 0, pattern: pattern}
%Plane{material: material}
end
defp left_sphere do
transform = translation(-1.5, 0.33, -0.75) |> Matrix.mult(scaling(0.33, 0.33, 0.33))
pattern = GradientPattern.new(Color.new(1, 1, 1), Color.new_from_rgb_255(68, 112, 43), scaling(2, 1, 1) |> Matrix.mult(translation(0.5, 0, 0)))
material = %Material{Material.new | color: Color.new(1, 0.8, 0.1), specular: 0.3, diffuse: 0.7, pattern: pattern}
%Sphere{Sphere.new | material: material, transform: transform}
end
defp middle_sphere do
transform = translation(-0.5, 1, 0.5)
pattern = RingPattern.new(Color.new(1, 1, 1), Color.new_from_rgb_255(68, 112, 43), scaling(0.2, 0.2, 0.2) |> Matrix.mult(rotation_x(:math.pi / 2)))
material = %Material{specular: 0.3, diffuse: 0.7, pattern: pattern}
%Sphere{Sphere.new | material: material, transform: transform}
end
defp right_sphere do
transform = translation(1.5, 0.5, -0.5) |> Matrix.mult(scaling(0.5, 0.5, 0.5))
pattern = StripePattern.new(Color.new(1, 1, 1), Color.new_from_rgb_255(68, 112, 43), scaling(0.2, 1, 1))
material = %Material{pattern: pattern, specular: 0.3, diffuse: 0.7}
%Sphere{Sphere.new | material: material, transform: transform}
end
end
lib/tasks/chapter10.ex
defmodule Cloudinary.Transformation.Color do
@moduledoc """
The color type definition for transformation parameters.
"""
@typedoc """
The color represented by a RGB/RGBA hex triplet or a color name string.
It treats a `t:charlist/0` (like `'a0fc72'`) as a hex triplet and a `t:String.t/0` (like
`"green"`) as a color name.
"""
@type t :: charlist | String.t()
defguardp is_hex_digit(d) when d in '0123456789ABCDEFabcdef'
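# Guards may only call guard-safe BIFs, so the guards below reach list
# elements through chained hd/tl calls instead of Enum.at/2.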
@doc """
Whether the argument is a charlist of the RGB hex triplet or not.
## Example
iex> #{__MODULE__}.is_rgb('e876f3')
true
iex> #{__MODULE__}.is_rgb('g680a7')
false
iex> #{__MODULE__}.is_rgb('12E34BA5')
false
iex> #{__MODULE__}.is_rgb('aab')
true
"""
defguard is_rgb(rgb)
when is_list(rgb) and
((length(rgb) == 6 and
is_hex_digit(hd(rgb)) and is_hex_digit(hd(tl(rgb))) and
is_hex_digit(hd(tl(tl(rgb)))) and is_hex_digit(hd(tl(tl(tl(rgb))))) and
is_hex_digit(hd(tl(tl(tl(tl(rgb)))))) and
is_hex_digit(hd(tl(tl(tl(tl(tl(rgb)))))))) or
(length(rgb) == 3 and
is_hex_digit(hd(rgb)) and is_hex_digit(hd(tl(rgb))) and
is_hex_digit(hd(tl(tl(rgb))))))
@doc """
Whether the argument is a charlist of the RGBA hex triplet or not.
## Example
iex> #{__MODULE__}.is_rgba('e876f3f0')
true
iex> #{__MODULE__}.is_rgba('g680a75d')
false
iex> #{__MODULE__}.is_rgba('12E34B')
false
iex> #{__MODULE__}.is_rgba('abfd')
true
"""
defguard is_rgba(rgba)
when is_list(rgba) and
((length(rgba) == 8 and
is_hex_digit(hd(rgba)) and is_hex_digit(hd(tl(rgba))) and
is_hex_digit(hd(tl(tl(rgba)))) and is_hex_digit(hd(tl(tl(tl(rgba))))) and
is_hex_digit(hd(tl(tl(tl(tl(rgba)))))) and
is_hex_digit(hd(tl(tl(tl(tl(tl(rgba))))))) and
is_hex_digit(hd(tl(tl(tl(tl(tl(tl(rgba)))))))) and
is_hex_digit(hd(tl(tl(tl(tl(tl(tl(tl(rgba)))))))))) or
(length(rgba) == 4 and
is_hex_digit(hd(rgba)) and is_hex_digit(hd(tl(rgba))) and
is_hex_digit(hd(tl(tl(rgba)))) and is_hex_digit(hd(tl(tl(tl(rgba)))))))
end
lib/cloudinary/transformation/color.ex
defmodule Game.Overworld do
@moduledoc """
Overworld helpers
"""
@type cell :: %{x: integer(), y: integer()}
@sector_boundary 10
@view_distance 10
@doc """
Break up an overworld id
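## Examples
iex> Game.Overworld.split_id("1:2,3")
{1, %{x: 2, y: 3}}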
"""
@spec split_id(String.t()) :: {integer(), cell()}
def split_id(overworld_id) do
try do
[zone_id, cell] = String.split(overworld_id, ":")
[x, y] = String.split(cell, ",")
cell = %{x: String.to_integer(x), y: String.to_integer(y)}
{String.to_integer(zone_id), cell}
rescue
MatchError ->
:error
end
end
@doc """
Take an overworld id and convert it to a zone id and sector
"""
def sector_from_overworld_id(overworld_id) do
{zone_id, cell} = split_id(overworld_id)
{zone_id, cell_sector(cell)}
end
@doc """
Figure out what sectors are in an overworld
"""
@spec break_into_sectors([Sector.t()]) :: [Sector.id()]
def break_into_sectors(overworld) do
Enum.reduce(overworld, [], fn cell, sectors ->
sector_id = cell_sector(cell)
Enum.uniq([sector_id | sectors])
end)
end
@doc """
Determine the sector a cell belongs to
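## Examples
iex> Game.Overworld.cell_sector(%{x: 15, y: 3})
"1-0"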
"""
def cell_sector(cell) do
x = div(cell.x, @sector_boundary)
y = div(cell.y, @sector_boundary)
"#{x}-#{y}"
end
@doc """
Determine exits for a cell in the overworld
"""
def exits(zone, cell) do
start_id = "overworld:#{zone.id}:#{cell.x},#{cell.y}"
north = %{direction: "north", start_id: start_id, x: cell.x, y: cell.y - 1}
south = %{direction: "south", start_id: start_id, x: cell.x, y: cell.y + 1}
east = %{direction: "east", start_id: start_id, x: cell.x + 1, y: cell.y}
west = %{direction: "west", start_id: start_id, x: cell.x - 1, y: cell.y}
north_west = %{direction: "north west", start_id: start_id, x: cell.x - 1, y: cell.y - 1}
north_east = %{direction: "north east", start_id: start_id, x: cell.x + 1, y: cell.y - 1}
south_west = %{direction: "south west", start_id: start_id, x: cell.x - 1, y: cell.y + 1}
south_east = %{direction: "south east", start_id: start_id, x: cell.x + 1, y: cell.y + 1}
[north, south, east, west, north_west, north_east, south_west, south_east]
|> Enum.filter(&in_overworld?(&1, zone))
|> Enum.map(fn direction ->
real_exit =
Enum.find(zone.exits, &(&1.start_id == start_id && &1.direction == direction.direction))
case real_exit do
nil ->
finish_id = "overworld:#{zone.id}:#{direction.x},#{direction.y}"
%{
id: direction.start_id,
direction: direction.direction,
start_id: direction.start_id,
finish_id: finish_id
}
real_exit ->
real_exit
end
end)
end
defp in_overworld?(direction, zone) do
Enum.any?(zone.overworld_map, fn cell ->
cell.x == direction.x && cell.y == direction.y && !cell_empty?(cell)
end)
end
defp cell_empty?(cell) do
is_nil(cell.c) && cell.s == " "
end
@doc """
Generate an overworld map around the cell.
The cell's x,y will be in the center, rendered as an `X`.
"""
def map(zone, cell) do
zone.overworld_map
|> Enum.filter(fn overworld_cell ->
close?(overworld_cell, cell, @view_distance)
end)
|> Enum.group_by(& &1.y)
|> Enum.into([])
|> Enum.sort(fn {y_a, _}, {y_b, _} ->
y_a <= y_b
end)
|> Enum.reduce(%{}, fn {_y, cells}, map ->
cells
|> Enum.sort(&(&1.x <= &2.x))
|> Enum.reduce(map, fn overworld_cell, map ->
row = Map.get(map, overworld_cell.y, %{})
row = Map.put(row, overworld_cell.x, format_cell(overworld_cell, cell))
Map.put(map, overworld_cell.y, row)
end)
end)
|> format_map(cell)
end
defp format_map(map, cell) do
min_x = cell.x - @view_distance + 1
max_x = cell.x + @view_distance - 1
min_y = cell.y - @view_distance + 1
max_y = cell.y + @view_distance - 1
min_y..max_y
|> Enum.map(fn y ->
min_x..max_x
|> Enum.map(fn x ->
map
|> Map.get(y, %{})
|> Map.get(x, " ")
end)
|> Enum.join()
end)
|> Enum.join("\n")
end
defp close?(cell_a, cell_b, expected) do
actual =
round(:math.sqrt(:math.pow(cell_b.x - cell_a.x, 2) + :math.pow(cell_b.y - cell_a.y, 2)))
actual < expected
end
defp format_cell(overworld_cell, cell) do
case overworld_cell.x == cell.x && overworld_cell.y == cell.y do
true ->
"X "
false ->
case overworld_cell.c do
nil ->
overworld_cell.s <> " "
color ->
"{#{color}}#{overworld_cell.s} {/#{color}}"
end
end
end
end
lib/game/overworld.ex
defmodule Riptide.Store.LMDB do
@moduledoc """
This store persists data to [LMDB](https://symas.com/lmdb/) using a bridge built in Rust. LMDB is a fast, memory-mapped key-value store that is persisted to a single file. It's a great choice for many projects that need persistence but want to avoid the overhead of setting up a standalone database.
## Configuration
This store has a dependency on the Rust toolchain which can be installed via [rustup](https://rustup.rs/). Once installed, add the `bridge_lmdb` dependency to your `mix.exs`.
```elixir
defp deps do
[
{:riptide, "~> 0.4.0"},
{:bridge_lmdb, "~> 0.1.1"}
]
end
```
And then you can configure the store:
```elixir
config :riptide,
store: %{
read: {Riptide.Store.LMDB, directory: "data"},
write: {Riptide.Store.LMDB, directory: "data"},
}
```
## Options
- `:directory` - directory where the database is stored (required)
"""
@behaviour Riptide.Store
@delimiter "×"
@impl true
def init(directory: directory) do
{:ok, env} = Bridge.LMDB.open_env(directory)
:persistent_term.put({:riptide, directory}, env)
:ok
end
defp env(directory: directory) do
:persistent_term.get({:riptide, directory})
end
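# Merges are written as encoded path/JSON pairs; each delete is expanded
# into the full set of stored keys under that path before removal.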
@impl true
def mutation(merges, deletes, opts) do
env = env(opts)
Bridge.LMDB.batch_write(
env,
Enum.map(merges, fn {path, value} ->
{encode_path(path), Jason.encode!(value)}
end),
Enum.flat_map(deletes, fn {path, _} ->
env
|> iterate(path, %{})
|> Enum.map(fn {path, _val} -> path end)
end)
)
end
@impl true
def query(layers, opts) do
env = env(opts)
layers
|> Stream.map(fn {path, opts} ->
{
path,
env
|> iterate(path, opts)
|> Stream.map(fn {path, value} -> {decode_path(path), Jason.decode!(value)} end)
}
end)
end
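# Returns every stored {key, value} under `path`: an exact lookup on the
# encoded prefix itself plus a range scan over all deeper keys.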
def iterate(env, path, opts) do
combined = Enum.join(path, @delimiter)
{min, max} = Riptide.Store.Prefix.range(combined, opts)
min = Enum.join(min, @delimiter)
max = Enum.join(max, @delimiter)
{:ok, tx} = Bridge.LMDB.txn_read_new(env)
exact =
tx
|> Bridge.LMDB.get(min)
|> case do
{:ok, value} -> [{min, value}]
_ -> []
end
:ok = Bridge.LMDB.txn_read_abort(tx)
Stream.concat(
exact,
Bridge.LMDB.stream(env, min <> @delimiter, max)
)
end
defp encode_path(path) do
Enum.join(path, @delimiter)
end
defp decode_path(input) do
String.split(input, @delimiter)
end
end
packages/elixir/lib/riptide/store/store_lmdb.ex
defmodule Maple.Helpers do
@moduledoc """
Helper functions to create the dynamic functions in the macro. Helps
keep the code somewhat clean and maintainable.
"""
require Logger
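# Builds structs of the function's return type from the raw GraphQL
# response, mapping over the result when it is a list.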
def apply_type(response, func) do
result = response[func[:name]]
cond do
is_list(result) ->
result
|> Enum.map(&(Kernel.struct(return_type(func[:type]), atomize_keys(&1))))
true ->
Kernel.struct(return_type(func[:type]), atomize_keys(result))
end
end
@doc """
Creates a custom map out of the parsed function data which is easier
to work with when creating custom functions.
"""
@spec assign_function_params(map()) :: map()
def assign_function_params(func) do
%{
name: func["name"],
arguments: func["args"],
function_name: String.to_atom(Macro.underscore(func["name"])),
required_params: get_required_params(func["args"]),
param_types: get_param_types(func["args"]),
deprecated: func["isDeprecated"],
deprecated_reason: func["deprecationReason"],
description: generate_help(func),
type: determin_type(func["type"])
}
end
@doc """
Creates a string to interpolate into the query or mutation that represents the
variables defined in the variables dictionary. Ex. `id: $id`
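## Examples
iex> Maple.Helpers.declare_params(%{id: 1})
"id: $id"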
"""
@spec declare_params(map()) :: String.t
def declare_params(params) do
params
|> Enum.map(fn {k, _v} ->
"#{k}: $#{k}"
end)
|> Enum.join(", ")
end
@doc """
Declares all the variables and their types that will be used inside the specific
function
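## Examples
iex> Maple.Helpers.declare_variables(%{id: "1"}, %{"id" => "ID"})
"$id: ID!"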
"""
@spec declare_variables(map(), map()) :: String.t
def declare_variables(params, types) do
params
|> Enum.map(fn {k, _v} ->
"$#{k}: #{types[Atom.to_string(k)]}!"
end)
|> Enum.join(", ")
end
@doc """
Emits a log warning if the function has been marked deprecared
"""
@spec deprecated?(boolean(), String.t, String.t) :: nil
def deprecated?(true, name, reason) do
Logger.warn("Deprecation warning - function #{name} is deprecated for the following reason: #{reason}.")
nil
end
def deprecated?(false, _, _), do: nil
@doc """
Finds all parameters that are missing from the required parameters list
"""
@spec find_missing(map(), list()) :: list()
def find_missing(params, required_params) do
required_params
|> Enum.reduce([], fn key, list ->
if Enum.member?(Map.keys(params), key), do: list, else: list ++ [key]
end)
end
@doc """
Returns a map of the GraphQL declared types for the arguments
"""
@spec get_param_types(map()) :: map()
def get_param_types(args) do
args
|> Enum.reduce(%{}, fn arg, c ->
Map.put(c, arg["name"], determin_type(arg["type"]))
end)
end
@doc """
Returns all the parameters flagged as required
"""
@spec get_required_params(map()) :: list()
def get_required_params(args) do
args
|> Enum.filter(&(&1["type"]["kind"] == "NON_NULL"))
|> Enum.map(&(String.to_atom(&1["name"])))
end
@doc """
Remove all the whitespaces from a string
"""
@spec remove_whitespaces(String.t()) :: String.t()
def remove_whitespaces(string) do
string
|> String.replace("\n", "")
|> String.replace(" ", "")
end
def return_type(type) do
type =
type
|> String.trim("[")
|> String.trim("]")
["Maple", "Types", type] |> Module.concat() |> Kernel.struct()
end
def stringify_struct(obj) do
keys =
obj
|> Map.keys()
|> List.delete(:__struct__)
|> Enum.join(",")
"#{keys}"
end
defp atomize_keys(map) do
for {key, val} <- map, into: %{}, do: {String.to_atom(key), val}
end
# Determines the type when the type is not explicitly defined.
# Falls back on the String type.
@spec determin_type(map()) :: String.t
defp determin_type(type) do
cond do
empty?(type["ofType"]) -> type["name"]
type["kind"] == "LIST" -> "[#{determin_type(type["ofType"])}]"
Map.has_key?(type["ofType"], "ofType") -> determin_type(type["ofType"])
true -> "String"
end
end
# Checks whether the value is nil.
@spec empty?(String.t | nil) :: boolean()
defp empty?(nil), do: true
defp empty?(_), do: false
# Creates a help string for a function.
@spec generate_help(map()) :: String.t
defp generate_help(func) do
"""
#{if(func["description"], do: func["description"], else: "No description available")}
\n
"""
<>
(func["args"]
|> Enum.map(fn arg ->
"""
Param name: #{arg["name"]}
- Description: #{if(arg["description"], do: arg["description"], else: "No description ")}
- Type: #{determin_type(arg["type"])}
- Required: #{if(arg["type"]["kind"] == "NON_NULL", do: "Yes", else: "No")}
"""
end)
|> Enum.join("\n"))
end
end
lib/maple/helpers.ex
defmodule BinaryHeap do
defstruct data: [], comparator: nil
@moduledoc """
An implementation of a binary heap.
## Node `n` (0-based numbering)
`parent node` : div(n - 1, 2)
`children node` : 2n + 1, 2n + 2
## Node `n` (1-based numbering)
`parent node` : div(n, 2)
`children node` : 2n, 2n + 1
"""
@behaviour Heapable
@type t() :: %BinaryHeap{}
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
%BinaryHeap{
data: [],
comparator: &(&1 < &2)
}
iex> BinaryHeap.new(&(&1 > &2))
%BinaryHeap{
data: [],
comparator: &(&1 > &2)
}
"""
def new(comparator), do: %__MODULE__{comparator: comparator}
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.empty?()
true
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.empty?()
false
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.pop()
...> |> BinaryHeap.empty?()
true
"""
def empty?(%__MODULE__{data: []}), do: true
def empty?(%__MODULE__{}), do: false
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.size()
0
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.size()
1
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.pop()
...> |> BinaryHeap.size()
0
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 < &2)))
...> |> BinaryHeap.size()
10
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 < &2)))
...> |> BinaryHeap.pop()
...> |> BinaryHeap.size()
9
"""
def size(%__MODULE__{data: data}) do
length(data)
end
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.top()
nil
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.top()
1
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 < &2)))
...> |> BinaryHeap.top()
1
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 < &2)))
...> |> BinaryHeap.pop()
...> |> BinaryHeap.top()
2
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 > &2)))
...> |> BinaryHeap.top()
10
iex> 1..10
...> |> Enum.into(BinaryHeap.new(&(&1 > &2)))
...> |> BinaryHeap.pop()
...> |> BinaryHeap.top()
9
"""
def top(%__MODULE__{data: []}), do: nil
def top(%__MODULE__{data: [t | _]}), do: t
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.pop()
nil
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
...> |> BinaryHeap.pop()
%BinaryHeap{data: [], comparator: &(&1 < &2)}
iex> 1..5
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 < &2))
...> )
...> |> Enum.to_list()
[1, 2, 3, 4, 5]
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 < &2))
...> )
...> |> Enum.to_list()
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
iex> 1..5
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 > &2))
...> )
...> |> Enum.to_list()
[5, 4, 3, 2, 1]
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 > &2))
...> )
...> |> Enum.to_list()
[10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
"""
def pop(%__MODULE__{data: []}), do: nil
def pop(%__MODULE__{data: [_]} = heap), do: %{heap | data: []}
def pop(%__MODULE__{data: [_ | tail], comparator: comp} = heap) do
%{
heap |
data: do_pop([List.last(tail) | List.delete_at(tail, -1)], comp)
}
end
defp do_pop(data, comp, n \\ 0) do
parent = Enum.at(data, n)
left_index = index(:left_child, n)
right_index = index(:right_child, n)
{Enum.at(data, left_index), Enum.at(data, right_index)}
|> case do
{nil, nil} ->
data
{left, nil} ->
if comp.(parent, left), do: data, else: {left_index, left}
{left, right} ->
if comp.(left, right) do
if comp.(parent, left), do: data, else: {left_index, left}
else
if comp.(parent, right), do: data, else: {right_index, right}
end
end
|> case do
{index, value} ->
data
|> List.replace_at(index, parent)
|> List.replace_at(n, value)
|> do_pop(comp, index)
data when is_list(data) ->
data
end
end
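# Sift-down sketch with comp = &(&1 < &2), starting from [5, 1, 2, 3] right
# after the old last element (5) replaced the popped root:
#   n = 0: children at 1 and 2 hold 1 and 2; 1 wins, swap -> [1, 5, 2, 3]
#   n = 1: only child at 3 holds 3; 3 < 5, swap -> [1, 3, 2, 5]
#   n = 3: no children, the recursion stops with a valid heap.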
@doc """
## Examples
iex> BinaryHeap.new(&(&1 < &2))
...> |> BinaryHeap.push(1)
%BinaryHeap{
data: [1],
comparator: &(&1 < &2)
}
iex> 1..5
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 < &2))
...> )
...> |> BinaryHeap.size()
5
iex> 1..10
...> |> Enum.shuffle()
...> |> Enum.into(
...> BinaryHeap.new(&(&1 < &2))
...> )
...> |> BinaryHeap.size()
10
"""
def push(%__MODULE__{data: data, comparator: comp} = heap, value) do
arr = data ++ [value]
%{
heap |
data: do_push(arr, length(arr) - 1, comp)
}
end
defp do_push(arr, n, comp)
defp do_push(arr, 0, _comp), do: arr
defp do_push(arr, n, comp) do
pi = index(:parent, n)
parent = Enum.at(arr, pi)
child = Enum.at(arr, n)
if (comp.(parent, child)) do
arr
else
arr
|> List.replace_at(pi, child)
|> List.replace_at(n, parent)
|> do_push(pi, comp)
end
end
# The data list is 0-indexed (children at 2n + 1 and 2n + 2), so the parent
# of node n is div(n - 1, 2); div(n, 2) would point even-numbered nodes at
# the wrong parent and break the heap property.
defp index(:parent, n), do: div(n - 1, 2)
defp index(:left_child, n), do: 2 * n + 1
defp index(:right_child, n), do: 2 * n + 2
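# Index arithmetic example for the backing list [a, b, c, d, e]:
# index 0 has children at 1 and 2; index 1 has children at 3 and 4; and
# div(3 - 1, 2) == div(4 - 1, 2) == 1 recovers their parent.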
defimpl Collectable do
@spec into(BinaryHeap.t()) :: {BinaryHeap.t(), function()}
def into(heap) do
{
heap,
fn
heap, {:cont, v} -> BinaryHeap.push(heap, v)
heap, :done -> heap
_heap, :halt -> :ok
end
}
end
end
defimpl Enumerable do
@spec count(BinaryHeap.t()) :: non_neg_integer()
def count(heap), do: {:ok, BinaryHeap.size(heap)}
@spec member?(BinaryHeap.t(), term()) :: {:error, module()}
def member?(_, _), do: {:error, __MODULE__}
@spec slice(BinaryHeap.t()) :: {:error, module()}
def slice(_), do: {:error, __MODULE__}
@spec reduce(BinaryHeap.t(), Enumerable.acc(), Enumerable.reducer()) :: Enumerable.result()
def reduce(_heap, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(heap, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(heap, &1, fun)}
def reduce(%BinaryHeap{data: []}, {:cont, acc}, _fun), do: {:done, acc}
def reduce(heap, {:cont, acc}, fun) do
reduce(BinaryHeap.pop(heap), fun.(BinaryHeap.top(heap), acc), fun)
end
end
end
# lib/algs/heap/binary_heap.ex
defmodule Telemetry.Metrics.ConsoleReporter do
@moduledoc """
A reporter that prints events and metrics to the terminal.
This is useful for debugging and discovering all available
measurements and metadata in an event.
For example, imagine the given metrics:
metrics = [
last_value("vm.memory.binary", unit: :byte),
counter("vm.memory.total")
]
A console reporter can be started as a child of your supervision
tree as:
{Telemetry.Metrics.ConsoleReporter, metrics: metrics}
Now when the "vm.memory" telemetry event is dispatched, we will see
reports like this:
[Telemetry.Metrics.ConsoleReporter] Got new event!
Event name: vm.memory
All measurements: %{binary: 100, total: 200}
All metadata: %{}
Metric measurement: :binary (last_value)
With value: 100 byte
And tag values: %{}
Metric measurement: :total (counter)
With value: 200
And tag values: %{}
In other words, every time there is an event for any of the registered
metrics, it prints the event measurement and metadata, and then it prints
information about each metric to the user.
"""
use GenServer
require Logger
def start_link(opts) do
server_opts = Keyword.take(opts, [:name])
device = opts[:device] || :stdio
metrics =
opts[:metrics] ||
raise ArgumentError, "the :metrics option is required by #{inspect(__MODULE__)}"
GenServer.start_link(__MODULE__, {metrics, device}, server_opts)
end
@impl true
def init({metrics, device}) do
Process.flag(:trap_exit, true)
groups = Enum.group_by(metrics, & &1.event_name)
for {event, metrics} <- groups do
id = {__MODULE__, event, self()}
:telemetry.attach(id, event, &handle_event/4, {metrics, device})
end
{:ok, Map.keys(groups)}
end
@impl true
def terminate(_, events) do
for event <- events do
:telemetry.detach({__MODULE__, event, self()})
end
:ok
end
defp handle_event(event_name, measurements, metadata, {metrics, device}) do
prelude = """
[#{inspect(__MODULE__)}] Got new event!
Event name: #{Enum.join(event_name, ".")}
All measurements: #{inspect(measurements)}
All metadata: #{inspect(metadata)}
"""
parts =
for %struct{} = metric <- metrics do
header = """
Metric measurement: #{inspect(metric.measurement)} (#{metric(struct)})
"""
[
header
| try do
measurement = extract_measurement(metric, measurements)
tags = extract_tags(metric, metadata)
cond do
is_nil(measurement) ->
"""
No value available (metric skipped)
"""
metric.__struct__ == Telemetry.Metrics.Counter ->
"""
Tag values: #{inspect(tags)}
"""
true ->
"""
With value: #{inspect(measurement)}#{unit(metric.unit)}#{info(measurement)}
Tag values: #{inspect(tags)}
"""
end
rescue
e ->
Logger.error([
"Could not format metric #{inspect(metric)}\n",
Exception.format(:error, e, __STACKTRACE__)
])
"""
Errored when processing! (metric skipped)
"""
end
]
end
IO.puts(device, [prelude | parts])
end
defp extract_measurement(metric, measurements) do
case metric.measurement do
fun when is_function(fun, 1) -> fun.(measurements)
key -> measurements[key]
end
end
defp info(int) when is_number(int), do: ""
defp info(_), do: " (WARNING! measurement should be a number)"
defp unit(:unit), do: ""
defp unit(unit), do: " #{unit}"
defp metric(Telemetry.Metrics.Counter), do: "counter"
defp metric(Telemetry.Metrics.Distribution), do: "distribution"
defp metric(Telemetry.Metrics.LastValue), do: "last_value"
defp metric(Telemetry.Metrics.Sum), do: "sum"
defp metric(Telemetry.Metrics.Summary), do: "summary"
defp extract_tags(metric, metadata) do
tag_values = metric.tag_values.(metadata)
Map.take(tag_values, metric.tags)
end
end
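# A minimal sketch of driving the reporter by hand (assumes the :telemetry and
# :telemetry_metrics applications are started, e.g. from an IEx session):
#
#     metrics = [Telemetry.Metrics.last_value("vm.memory.binary", unit: :byte)]
#     {:ok, _pid} = Telemetry.Metrics.ConsoleReporter.start_link(metrics: metrics)
#     :telemetry.execute([:vm, :memory], %{binary: 100}, %{})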
# lib/telemetry_metrics/console_reporter.ex
defmodule Ratatouille.Runtime do
@moduledoc """
A runtime for apps implementing the `Ratatouille.App` behaviour. See
`Ratatouille.App` for details on how to build apps.
## Runtime Context
The runtime provides a map with additional context to the app's
`c:Ratatouille.App.init/1` callback. This can be used, for example,
to get information about the terminal window. Currently, the following
attributes are provided via the map:
* `:window`: A map with `:height` and `:width` keys.
## Shutdown Options
* `:window`: Resets the terminal window and stops the runtime process.
Afterwards the system will still be running unless stopped elsewhere.
* `{:system, :stop}`: Resets the terminal window and gracefully stops the
system (calls `:init.stop/0`). Gracefully shutting down the VM ensures that
all applications are stopped, but it takes at least one second which results
in a noticeable lag.
* `{:system, :halt}`: Resets the terminal window and immediately halts the
system (calls `:erlang.halt/0`). Applications are not cleanly stopped, but
this may be reasonable for some use cases.
* `{:application, name}` - Resets the terminal window and stops the named
application. Afterwards the rest of the system will still be running unless
stopped elsewhere.
"""
use Task, restart: :transient
alias Ratatouille.{EventManager, Window}
alias Ratatouille.Runtime.{Command, State, Subscription}
import Ratatouille.Constants, only: [event_type: 1, key: 1]
require Logger
@default_interval_ms 500
@default_quit_events [
{:ch, ?q},
{:ch, ?Q},
{:key, key(:ctrl_c)}
]
@resize_event event_type(:resize)
@doc """
Starts the application runtime given a module defining a Ratatouille terminal
application.
## Configuration
* `:app` (required) - The `Ratatouille.App` to run.
* `:shutdown` - The strategy for stopping the terminal application when a quit
event is received.
* `:interval` - The runtime loop interval in milliseconds. The default
interval is 500 ms. A subscription can be fulfilled at most once every
interval, so this effectively configures the maximum subscription
resolution that's possible.
* `:quit_events` - Specifies the events that should quit the terminal
application. Given as a list of tuples, where each tuple conforms to
either `{:ch, ch}` or `{:key, key}`. If not specified, ctrl-c and q / Q
can be used to quit the application by default.
"""
@spec start_link(Keyword.t()) :: {:ok, pid()}
def start_link(config) do
state = %State{
app: Keyword.fetch!(config, :app),
event_manager: Keyword.get(config, :event_manager, EventManager),
window: Keyword.get(config, :window, Window),
shutdown: Keyword.get(config, :shutdown, :window),
interval: Keyword.get(config, :interval, @default_interval_ms),
quit_events: Keyword.get(config, :quit_events, @default_quit_events)
}
Task.start_link(__MODULE__, :run, [state])
end
@spec run(State.t()) :: :ok
def run(state) do
:ok = EventManager.subscribe(state.event_manager, self())
model = initial_model(state)
subscriptions =
if function_exported?(state.app, :subscribe, 1) do
model |> state.app.subscribe() |> Subscription.to_list()
else
[]
end
loop(%State{state | model: model, subscriptions: subscriptions})
rescue
# We rescue any exceptions so that we can be sure they're printed to the
# screen.
e ->
formatted_exception = Exception.format(:error, e, __STACKTRACE__)
abort(
state.window,
"Error in application loop:\n #{formatted_exception}"
)
end
defp loop(state) do
:ok = Window.update(state.window, state.app.render(state.model))
receive do
{:event, %{type: @resize_event} = event} ->
state
|> process_update({:resize, event})
|> loop()
{:event, event} ->
if quit_event?(state.quit_events, event) do
shutdown(state)
else
state
|> process_update({:event, event})
|> loop()
end
{:command_result, message} ->
state
|> process_update(message)
|> loop()
after
state.interval ->
state
|> process_subscriptions()
|> loop()
end
end
defp initial_model(state) do
ctx = context(state)
case state.app.init(ctx) do
{model, %Command{} = command} ->
:ok = process_command_async(command)
model
model ->
model
end
end
defp process_update(state, message) do
case state.app.update(state.model, message) do
{model, %Command{} = command} ->
:ok = process_command_async(command)
%State{state | model: model}
model ->
%State{state | model: model}
end
end
defp process_subscriptions(state) do
{new_subs, new_state} =
Enum.map_reduce(state.subscriptions, state, fn sub, state_acc ->
process_subscription(state_acc, sub)
end)
%State{new_state | subscriptions: new_subs}
end
defp process_subscription(state, sub) do
case sub do
%Subscription{type: :interval, data: {interval_ms, last_at_ms}} ->
now = :erlang.monotonic_time(:millisecond)
if last_at_ms + interval_ms <= now do
new_sub = %Subscription{sub | data: {interval_ms, now}}
new_state = process_update(state, sub.message)
{new_sub, new_state}
else
{sub, state}
end
_ ->
{sub, state}
end
end
defp process_command_async(command) do
runtime_pid = self()
for cmd <- Command.to_list(command) do
# TODO: This is missing a few things:
# - Need to capture failures and report them via the update/2 callback.
# - Could be as simple as {:ok, result} | {:error, error}
# - Should provide a timeout mechanism with sensible defaults. This should
#   help prevent hangs from long-running commands.
{:ok, _pid} =
Task.start(fn ->
result = cmd.function.()
send(runtime_pid, {:command_result, {cmd.message, result}})
end)
end
:ok
end
defp context(state) do
%{window: window_info(state.window)}
end
defp window_info(window) do
{:ok, height} = Window.fetch(window, :height)
{:ok, width} = Window.fetch(window, :width)
%{height: height, width: width}
end
defp abort(window, error_msg) do
:ok = Window.close(window)
Logger.error(error_msg)
Logger.warn(
"The Ratatouille termbox window was automatically closed due " <>
"to an error (you may need to quit Erlang manually)."
)
:ok
end
defp shutdown(state) do
:ok = Window.close(state.window)
case state.shutdown do
{:application, app} -> Application.stop(app)
{:system, :stop} -> System.stop()
{:system, :halt} -> System.halt()
:window -> :ok
end
end
defp quit_event?([], _event), do: false
defp quit_event?([{:ch, ch} | _], %{ch: ch}), do: true
defp quit_event?([{:key, key} | _], %{key: key}), do: true
defp quit_event?([_ | events], event), do: quit_event?(events, event)
end
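# A hedged usage sketch (MyApp.TerminalApp is a hypothetical module implementing
# the Ratatouille.App behaviour) showing the runtime under a supervisor with a
# custom quit key and a graceful shutdown strategy:
#
#     children = [
#       {Ratatouille.Runtime,
#        app: MyApp.TerminalApp,
#        shutdown: {:system, :stop},
#        quit_events: [{:key, Ratatouille.Constants.key(:esc)}]}
#     ]
#     Supervisor.start_link(children, strategy: :one_for_one)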
# lib/ratatouille/runtime.ex
defmodule Xandra.Protocol.V4 do
@moduledoc false
use Bitwise
require Decimal
alias Xandra.{
Batch,
Error,
Frame,
Page,
Prepared,
Simple,
TypeParser
}
alias Xandra.Cluster.{StatusChange, TopologyChange}
@unix_epoch_days 0x80000000
# We need these two macros to make
# a single match context possible.
defmacrop decode_string({:<-, _, [value, buffer]}) do
quote do
<<size::16, unquote(value)::size(size)-bytes, unquote(buffer)::bits>> = unquote(buffer)
end
end
defmacrop decode_uuid({:<-, _, [value, buffer]}) do
quote do
<<unquote(value)::16-bytes, unquote(buffer)::bits>> = unquote(buffer)
end
end
defmacrop decode_value({:<-, _, [value, buffer]}, type, do: block) do
quote do
<<size::32-signed, unquote(buffer)::bits>> = unquote(buffer)
if size < 0 do
unquote(value) = nil
unquote(block)
else
<<data::size(size)-bytes, unquote(buffer)::bits>> = unquote(buffer)
unquote(value) = decode_value(data, unquote(type))
unquote(block)
end
end
end
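# These macros expand to plain binary matches that rebind `buffer`; for
# example, decode_string(name <- buffer) becomes roughly:
#
#     <<size::16, name::size(size)-bytes, buffer::bits>> = buffer
#
# so consecutive decodes can share a single binary match context.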
@spec encode_request(Frame.t(kind), term, keyword) :: Frame.t(kind) when kind: var
def encode_request(frame, params, options \\ [])
def encode_request(%Frame{kind: :options} = frame, nil, _options) do
%{frame | body: []}
end
def encode_request(%Frame{kind: :startup} = frame, requested_options, _options)
when is_map(requested_options) do
%{frame | body: encode_string_map(requested_options)}
end
def encode_request(%Frame{kind: :auth_response} = frame, _requested_options, options) do
case Keyword.fetch(options, :authentication) do
{:ok, authentication} ->
with {authenticator, auth_options} <- authentication,
body <- authenticator.response_body(auth_options) do
%{frame | body: [<<IO.iodata_length(body)::32>>, body]}
else
_ ->
raise "the :authentication option must be " <>
"an {auth_module, auth_options} tuple, " <> "got: #{inspect(authentication)}"
end
:error ->
raise "Cassandra server requires authentication but the :authentication option was not provided"
end
end
def encode_request(%Frame{kind: :register} = frame, events, _options) when is_list(events) do
%{frame | body: encode_string_list(events)}
end
def encode_request(%Frame{kind: :query} = frame, %Simple{} = query, options) do
%{statement: statement, values: values} = query
body = [
<<byte_size(statement)::32>>,
statement,
encode_params([], values, options, query.default_consistency, _skip_metadata? = false)
]
%{frame | body: body}
end
def encode_request(%Frame{kind: :prepare} = frame, %Prepared{} = prepared, _options) do
%{statement: statement} = prepared
%{frame | body: [<<byte_size(statement)::32>>, statement]}
end
def encode_request(%Frame{kind: :execute} = frame, %Prepared{} = prepared, options) do
%{id: id, bound_columns: columns, values: values} = prepared
skip_metadata? = prepared.result_columns != nil
body = [
<<byte_size(id)::16>>,
id,
encode_params(columns, values, options, prepared.default_consistency, skip_metadata?)
]
%{frame | body: body}
end
def encode_request(%Frame{kind: :batch} = frame, %Batch{} = batch, options) do
%{queries: queries, type: type} = batch
consistency = Keyword.get(options, :consistency, batch.default_consistency)
serial_consistency = Keyword.get(options, :serial_consistency)
timestamp = Keyword.get(options, :timestamp)
flags =
0x00
|> set_flag(_serial_consistency = 0x10, serial_consistency)
|> set_flag(_default_timestamp = 0x20, timestamp)
encoded_queries = [<<length(queries)::16>>] ++ Enum.map(queries, &encode_query_in_batch/1)
body = [
encode_batch_type(type),
encoded_queries,
encode_consistency_level(consistency),
flags,
encode_serial_consistency(serial_consistency),
if(timestamp, do: <<timestamp::64>>, else: [])
]
%{frame | body: body}
end
defp encode_batch_type(:logged), do: 0
defp encode_batch_type(:unlogged), do: 1
defp encode_batch_type(:counter), do: 2
defp encode_string_list(list) do
parts = for string <- list, do: [<<byte_size(string)::16>>, string]
[<<length(list)::16>>] ++ parts
end
defp encode_string_map(map) do
parts =
for {key, value} <- map do
[<<byte_size(key)::16>>, key, <<byte_size(value)::16>>, value]
end
[<<map_size(map)::16>>] ++ parts
end
consistency_levels = %{
0x0000 => :any,
0x0001 => :one,
0x0002 => :two,
0x0003 => :three,
0x0004 => :quorum,
0x0005 => :all,
0x0006 => :local_quorum,
0x0007 => :each_quorum,
0x0008 => :serial,
0x0009 => :local_serial,
0x000A => :local_one
}
for {spec, level} <- consistency_levels do
defp encode_consistency_level(unquote(level)) do
<<unquote(spec)::16>>
end
end
defp set_flag(mask, bit, value) do
if value do
mask ||| bit
else
mask
end
end
defp set_query_values_flag(mask, values) do
cond do
values == [] or values == %{} ->
mask
is_list(values) ->
mask ||| 0x01
is_map(values) ->
mask ||| 0x01 ||| 0x40
end
end
defp encode_params(columns, values, options, default_consistency, skip_metadata?) do
consistency = Keyword.get(options, :consistency, default_consistency)
page_size = Keyword.get(options, :page_size, 10_000)
paging_state = Keyword.get(options, :paging_state)
serial_consistency = Keyword.get(options, :serial_consistency)
timestamp = Keyword.get(options, :timestamp)
flags =
0x00
|> set_query_values_flag(values)
|> set_flag(_page_size = 0x04, true)
|> set_flag(_metadata_presence = 0x02, skip_metadata?)
|> set_flag(_paging_state = 0x08, paging_state)
|> set_flag(_serial_consistency = 0x10, serial_consistency)
|> set_flag(_default_timestamp = 0x20, timestamp)
encoded_values =
if values == [] or values == %{} do
[]
else
encode_query_values(columns, values)
end
[
encode_consistency_level(consistency),
flags,
encoded_values,
<<page_size::32>>,
encode_paging_state(paging_state),
encode_serial_consistency(serial_consistency),
if(timestamp, do: <<timestamp::64>>, else: [])
]
end
defp encode_paging_state(value) do
if value do
[<<byte_size(value)::32>>, value]
else
[]
end
end
defp encode_serial_consistency(nil) do
[]
end
defp encode_serial_consistency(consistency) when consistency in [:serial, :local_serial] do
encode_consistency_level(consistency)
end
defp encode_serial_consistency(other) do
raise ArgumentError,
"the :serial_consistency option must be either :serial or :local_serial, " <>
"got: #{inspect(other)}"
end
defp encode_query_in_batch(%Simple{statement: statement, values: values}) do
[
_kind = 0,
<<byte_size(statement)::32>>,
statement,
encode_query_values([], values)
]
end
defp encode_query_in_batch(%Prepared{id: id, bound_columns: bound_columns, values: values}) do
[
_kind = 1,
<<byte_size(id)::16>>,
id,
encode_query_values(bound_columns, values)
]
end
defp encode_query_values([], values) when is_list(values) do
[<<length(values)::16>>] ++ Enum.map(values, &encode_query_value/1)
end
defp encode_query_values([], values) when is_map(values) do
parts =
for {name, value} <- values do
name = to_string(name)
[<<byte_size(name)::16>>, name, encode_query_value(value)]
end
[<<map_size(values)::16>>] ++ parts
end
defp encode_query_values(columns, values) when is_list(values) do
encode_bound_values(columns, values, [<<length(columns)::16>>])
end
defp encode_query_values(columns, values) when map_size(values) == length(columns) do
parts =
for {_keyspace, _table, name, type} <- columns do
value = Map.fetch!(values, name)
[<<byte_size(name)::16>>, name, encode_query_value(type, value)]
end
[<<map_size(values)::16>>] ++ parts
end
defp encode_bound_values([], [], acc) do
acc
end
defp encode_bound_values([column | columns], [value | values], acc) do
{_keyspace, _table, _name, type} = column
acc = [acc | encode_query_value(type, value)]
encode_bound_values(columns, values, acc)
end
defp encode_query_value({type, value}) when is_binary(type) do
encode_query_value(TypeParser.parse(type), value)
end
defp encode_query_value(_type, nil) do
<<-1::32>>
end
defp encode_query_value(_type, :not_set) do
<<-2::32>>
end
defp encode_query_value(type, value) do
acc = encode_value(type, value)
[<<IO.iodata_length(acc)::32>>, acc]
end
defp encode_value(:ascii, value) when is_binary(value) do
value
end
defp encode_value(:bigint, value) when is_integer(value) do
<<value::64>>
end
defp encode_value(:blob, value) when is_binary(value) do
value
end
defp encode_value(:boolean, value) do
case value do
true -> [1]
false -> [0]
end
end
defp encode_value(:counter, value) when is_integer(value) do
<<value::64>>
end
defp encode_value(:date, %Date{} = value) do
value = date_to_unix_days(value)
<<value + @unix_epoch_days::32>>
end
defp encode_value(:date, value) when value in -@unix_epoch_days..(@unix_epoch_days - 1) do
<<value + @unix_epoch_days::32>>
end
defp encode_value(:decimal, {value, scale}) do
[encode_value(:int, scale), encode_value(:varint, value)]
end
# Decimal stores the decimal as "sign * coef * 10^exp", but Cassandra stores it
# as "coef * 10^(-1 * exp)".
defp encode_value(:decimal, decimal) do
if Decimal.is_decimal(decimal) do
%Decimal{coef: coef, exp: exp, sign: sign} = decimal
encode_value(:decimal, {_value = sign * coef, _scale = -exp})
else
raise ArgumentError,
"can only encode %Decimal{} structs or {value, scale} tuples as decimals"
end
end
defp encode_value(:double, value) when is_float(value) do
<<value::64-float>>
end
defp encode_value(:float, value) when is_float(value) do
<<value::32-float>>
end
defp encode_value(:inet, {n1, n2, n3, n4}) do
<<n1, n2, n3, n4>>
end
defp encode_value(:inet, {n1, n2, n3, n4, n5, n6, n7, n8}) do
<<n1::16, nfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b, nfdf8:f53e:61e4::18, n4::16, n5::16, n6::16, n7::16, n8::16>>
end
defp encode_value(:int, value) when is_integer(value) do
<<value::32>>
end
defp encode_value({:list, [items_type]}, collection) when is_list(collection) do
[<<length(collection)::32>>] ++ Enum.map(collection, &encode_query_value(items_type, &1))
end
defp encode_value({:map, [key_type, value_type]}, collection) when is_map(collection) do
parts =
for {key, value} <- collection do
[
encode_query_value(key_type, key),
encode_query_value(value_type, value)
]
end
[<<map_size(collection)::32>>] ++ parts
end
defp encode_value({:set, inner_type}, %MapSet{} = collection) do
encode_value({:list, inner_type}, MapSet.to_list(collection))
end
defp encode_value(:smallint, value) when is_integer(value) do
<<value::16>>
end
defp encode_value(:time, %Time{} = time) do
value = time_to_nanoseconds(time)
<<value::64>>
end
defp encode_value(:time, value) when value in 0..86_399_999_999_999 do
<<value::64>>
end
defp encode_value(:timestamp, %DateTime{} = value) do
<<DateTime.to_unix(value, :millisecond)::64>>
end
defp encode_value(:timestamp, value) when is_integer(value) do
<<value::64>>
end
defp encode_value(:tinyint, value) when is_integer(value) do
<<value>>
end
defp encode_value({:udt, fields}, value) when is_map(value) do
for {field_name, [field_type]} <- fields do
encode_query_value(field_type, Map.get(value, field_name))
end
end
defp encode_value(type, value) when type in [:uuid, :timeuuid] and is_binary(value) do
case byte_size(value) do
16 ->
value
36 ->
<<
part1::8-bytes,
?-,
part2::4-bytes,
?-,
part3::4-bytes,
?-,
part4::4-bytes,
?-,
part5::12-bytes
>> = value
<<
decode_base16(part1)::4-bytes,
decode_base16(part2)::2-bytes,
decode_base16(part3)::2-bytes,
decode_base16(part4)::2-bytes,
decode_base16(part5)::6-bytes
>>
end
end
defp encode_value(type, value) when type in [:varchar, :text] and is_binary(value) do
value
end
defp encode_value(:varint, value) when is_integer(value) do
size = varint_byte_size(value)
<<value::size(size)-unit(8)>>
end
defp encode_value({:tuple, types}, value) when length(types) == tuple_size(value) do
for {type, item} <- Enum.zip(types, Tuple.to_list(value)), do: encode_query_value(type, item)
end
defp varint_byte_size(value) when value > 127 do
1 + varint_byte_size(value >>> 8)
end
defp varint_byte_size(value) when value < -128 do
varint_byte_size(-value - 1)
end
defp varint_byte_size(_value), do: 1
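# Worked examples: varint_byte_size(127) == 1 (fits in one signed byte),
# varint_byte_size(128) == 2, and varint_byte_size(-129) == 2, since -129
# is mapped to 128 before the width is measured.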
@compile {:inline, decode_base16: 1}
defp decode_base16(value) do
Base.decode16!(value, case: :mixed)
end
@compile {:inline, encode_base16: 1}
defp encode_base16(value) do
Base.encode16(value, case: :lower)
end
error_codes = %{
0x0000 => :server_failure,
0x000A => :protocol_violation,
0x0100 => :invalid_credentials,
0x1000 => :unavailable,
0x1001 => :overloaded,
0x1002 => :bootstrapping,
0x1003 => :truncate_failure,
0x1100 => :write_timeout,
0x1200 => :read_timeout,
0x1300 => :read_failure,
0x1400 => :function_failure,
0x2000 => :invalid_syntax,
0x2100 => :unauthorized,
0x2200 => :invalid,
0x2300 => :invalid_config,
0x2400 => :already_exists,
0x2500 => :unprepared
}
for {code, reason} <- error_codes do
defp decode_error_reason(<<unquote(code)::32-signed, buffer::bytes>>) do
{unquote(reason), buffer}
end
end
defp decode_error_message(_reason, buffer) do
decode_string(message <- buffer)
_ = buffer
message
end
@spec decode_response(Frame.t(:error), term) :: Error.t()
@spec decode_response(Frame.t(:ready), nil) :: :ok
@spec decode_response(Frame.t(:supported), nil) :: %{optional(String.t()) => [String.t()]}
@spec decode_response(Frame.t(:result), Simple.t() | Prepared.t() | Batch.t()) ::
Xandra.result() | Prepared.t()
def decode_response(frame, query \\ nil, options \\ [])
def decode_response(%Frame{kind: :error, body: body, warning: warning?}, _query, _options) do
body = decode_warnings(body, warning?)
{reason, buffer} = decode_error_reason(body)
Error.new(reason, decode_error_message(reason, buffer))
end
def decode_response(%Frame{kind: :ready, body: <<>>}, nil, _options) do
:ok
end
def decode_response(%Frame{kind: :supported, body: body}, nil, _options) do
{value, <<>>} = decode_string_multimap(body)
value
end
def decode_response(%Frame{kind: :event, body: body}, nil, _options) do
decode_string(event <- body)
case event do
"STATUS_CHANGE" ->
decode_string(effect <- body)
{address, port, <<>>} = decode_inet(body)
%StatusChange{effect: effect, address: address, port: port}
"TOPOLOGY_CHANGE" ->
decode_string(effect <- body)
{address, port, <<>>} = decode_inet(body)
%TopologyChange{effect: effect, address: address, port: port}
end
end
def decode_response(
%Frame{
kind: :result,
body: body,
tracing: tracing?,
atom_keys?: atom_keys?,
warning: warning?
},
%kind{} = query,
options
)
when kind in [Simple, Prepared, Batch] do
{body, tracing_id} = decode_tracing_id(body, tracing?)
body = decode_warnings(body, warning?)
decode_result_response(body, query, tracing_id, Keyword.put(options, :atom_keys?, atom_keys?))
end
# We decode to consume the warning from the body but we ignore the result
defp decode_warnings(body, _warning? = false) do
body
end
defp decode_warnings(body, _warning? = true) do
{_warnings, body} = decode_string_list(body)
body
end
defp decode_inet(<<size, data::size(size)-bytes, buffer::bits>>) do
address = decode_value(data, :inet)
<<port::32, buffer::bits>> = buffer
{address, port, buffer}
end
defp decode_tracing_id(body, _tracing? = false) do
{body, _tracing_id = nil}
end
defp decode_tracing_id(body, _tracing? = true) do
decode_uuid(tracing_id <- body)
{body, tracing_id}
end
# Void
defp decode_result_response(<<0x0001::32-signed>>, _query, tracing_id, _options) do
%Xandra.Void{tracing_id: tracing_id}
end
# Page
defp decode_result_response(<<0x0002::32-signed, buffer::bits>>, query, tracing_id, options) do
page = new_page(query)
{page, buffer} = decode_metadata(buffer, page, Keyword.fetch!(options, :atom_keys?))
columns = rewrite_column_types(page.columns, options)
%{page | content: decode_page_content(buffer, columns), tracing_id: tracing_id}
end
# SetKeyspace
defp decode_result_response(<<0x0003::32-signed, buffer::bits>>, _query, tracing_id, _options) do
decode_string(keyspace <- buffer)
<<>> = buffer
%Xandra.SetKeyspace{keyspace: keyspace, tracing_id: tracing_id}
end
# Prepared
defp decode_result_response(
<<0x0004::32-signed, buffer::bits>>,
%Prepared{} = prepared,
tracing_id,
options
) do
atom_keys? = Keyword.fetch!(options, :atom_keys?)
decode_string(id <- buffer)
{%{columns: bound_columns}, buffer} = decode_metadata_prepared(buffer, %Page{}, atom_keys?)
{%{columns: result_columns}, <<>>} = decode_metadata(buffer, %Page{}, atom_keys?)
%{
prepared
| id: id,
bound_columns: bound_columns,
result_columns: result_columns,
tracing_id: tracing_id
}
end
# SchemaChange
defp decode_result_response(<<0x0005::32-signed, buffer::bits>>, _query, tracing_id, _options) do
decode_string(effect <- buffer)
decode_string(target <- buffer)
options = decode_change_options(buffer, target)
%Xandra.SchemaChange{effect: effect, target: target, options: options, tracing_id: tracing_id}
end
# Since SELECT statements are not allowed in BATCH queries, there's no need to
# support %Batch{} in this function.
defp new_page(%Simple{}), do: %Page{}
defp new_page(%Prepared{result_columns: result_columns}), do: %Page{columns: result_columns}
defp rewrite_column_types(columns, options) do
Enum.map(columns, fn {_, _, _, type} = column ->
put_elem(column, 3, rewrite_type(type, options))
end)
end
defp rewrite_type({parent_type, types}, options) do
{parent_type, Enum.map(types, &rewrite_type(&1, options))}
end
defp rewrite_type(:date, options) do
{:date, [Keyword.get(options, :date_format, :date)]}
end
defp rewrite_type(:time, options) do
{:time, [Keyword.get(options, :time_format, :time)]}
end
defp rewrite_type(:timestamp, options) do
{:timestamp, [Keyword.get(options, :timestamp_format, :datetime)]}
end
defp rewrite_type(:decimal, options) do
{:decimal, [Keyword.get(options, :decimal_format, :tuple)]}
end
defp rewrite_type(:uuid, options) do
{:uuid, [Keyword.get(options, :uuid_format, :string)]}
end
defp rewrite_type(:timeuuid, options) do
{:timeuuid, [Keyword.get(options, :timeuuid_format, :string)]}
end
defp rewrite_type(type, _options), do: type
defp decode_change_options(<<buffer::bits>>, "KEYSPACE") do
decode_string(keyspace <- buffer)
<<>> = buffer
%{keyspace: keyspace}
end
defp decode_change_options(<<buffer::bits>>, target) when target in ["TABLE", "TYPE"] do
decode_string(keyspace <- buffer)
decode_string(subject <- buffer)
<<>> = buffer
%{keyspace: keyspace, subject: subject}
end
defp decode_change_options(<<buffer::bits>>, target) when target in ["FUNCTION", "AGGREGATE"] do
decode_string(keyspace <- buffer)
decode_string(subject <- buffer)
{values, buffer} = decode_string_list(buffer)
<<>> = buffer
%{keyspace: keyspace, subject: subject, arguments: values}
end
defp decode_metadata_prepared(
<<flags::4-bytes, column_count::32-signed, pk_count::32-signed, buffer::bits>>,
page,
atom_keys?
) do
<<_::31, global_table_spec::1>> = flags
# partition key bind indices are ignored as we do not support token-aware routing
{_indices, buffer} = decode_pk_index(buffer, pk_count, [])
cond do
global_table_spec == 1 ->
decode_string(keyspace <- buffer)
decode_string(table <- buffer)
{columns, buffer} =
decode_columns(buffer, column_count, {keyspace, table}, atom_keys?, [])
{%{page | columns: columns}, buffer}
true ->
{columns, buffer} = decode_columns(buffer, column_count, nil, atom_keys?, [])
{%{page | columns: columns}, buffer}
end
end
# metadata format from the "Rows" result response
defp decode_metadata(
<<flags::4-bytes, column_count::32-signed, buffer::bits>>,
page,
atom_keys?
) do
<<_::29, no_metadata::1, has_more_pages::1, global_table_spec::1>> = flags
{page, buffer} = decode_paging_state(buffer, page, has_more_pages)
cond do
no_metadata == 1 ->
{page, buffer}
global_table_spec == 1 ->
decode_string(keyspace <- buffer)
decode_string(table <- buffer)
{columns, buffer} =
decode_columns(buffer, column_count, {keyspace, table}, atom_keys?, [])
{%{page | columns: columns}, buffer}
true ->
{columns, buffer} = decode_columns(buffer, column_count, nil, atom_keys?, [])
{%{page | columns: columns}, buffer}
end
end
# pk = partition key
def decode_pk_index(buffer, 0, acc) do
{Enum.reverse(acc), buffer}
end
def decode_pk_index(<<index::16-unsigned, buffer::bits>>, pk_count, acc) do
decode_pk_index(buffer, pk_count - 1, [index | acc])
end
defp decode_paging_state(<<buffer::bits>>, page, 0) do
{page, buffer}
end
defp decode_paging_state(<<buffer::bits>>, page, 1) do
<<size::32, paging_state::size(size)-bytes, buffer::bits>> = buffer
{%{page | paging_state: paging_state}, buffer}
end
defp decode_page_content(<<row_count::32-signed, buffer::bits>>, columns) do
decode_page_content(buffer, row_count, columns, columns, [[]])
end
defp decode_page_content(<<>>, 0, columns, columns, [[] | acc]) do
Enum.reverse(acc)
end
defp decode_page_content(<<buffer::bits>>, row_count, columns, [], [values | acc]) do
decode_page_content(buffer, row_count - 1, columns, columns, [[], Enum.reverse(values) | acc])
end
defp decode_page_content(<<buffer::bits>>, row_count, columns, [{_, _, _, type} | rest], [
values | acc
]) do
decode_value(value <- buffer, type) do
values = [value | values]
decode_page_content(buffer, row_count, columns, rest, [values | acc])
end
end
defp decode_value(<<value>>, :boolean), do: value != 0
defp decode_value(<<value::signed>>, :tinyint), do: value
defp decode_value(<<value::16-signed>>, :smallint), do: value
defp decode_value(<<value::64>>, {:time, [format]}) do
case format do
:time -> time_from_nanoseconds(value)
:integer -> value
end
end
defp decode_value(<<value::64-signed>>, :bigint), do: value
defp decode_value(<<value::64-signed>>, :counter), do: value
defp decode_value(<<value::64-signed>>, {:timestamp, [format]}) do
case format do
:datetime -> DateTime.from_unix!(value, :millisecond)
:integer -> value
end
end
defp decode_value(<<value::32>>, {:date, [format]}) do
unix_days = value - @unix_epoch_days
case format do
:date -> date_from_unix_days(unix_days)
:integer -> unix_days
end
end
defp decode_value(<<value::32-signed>>, :int), do: value
defp decode_value(<<value::64-float>>, :double), do: value
defp decode_value(<<value::32-float>>, :float), do: value
defp decode_value(<<data::4-bytes>>, :inet) do
<<n1, n2, n3, n4>> = data
{n1, n2, n3, n4}
end
defp decode_value(<<data::16-bytes>>, :inet) do
<<n1::16, n2::16, n3::16, n4::16, n5::16, n6::16, n7::16, n8::16>> = data
{n1, n2, n3, n4, n5, n6, n7, n8}
end
defp decode_value(<<value::16-bytes>>, {uuid_type, [format]})
when uuid_type in [:uuid, :timeuuid] do
case format do
:binary ->
value
:string ->
<<part1::32, part2::16, part3::16, part4::16, part5::48>> = value
encode_base16(<<part1::32>>) <>
"-" <>
encode_base16(<<part2::16>>) <>
"-" <>
encode_base16(<<part3::16>>) <>
"-" <>
encode_base16(<<part4::16>>) <>
"-" <>
encode_base16(<<part5::48>>)
end
end
defp decode_value(<<scale::32-signed, data::bits>>, {:decimal, [format]}) do
value = decode_value(data, :varint)
case format do
:tuple ->
{value, scale}
:decimal ->
sign = if(value >= 0, do: 1, else: -1)
Decimal.new(sign, _coefficient = abs(value), _exponent = -scale)
end
end
defp decode_value(<<count::32-signed, data::bits>>, {:list, [type]}) do
decode_value_list(data, count, type, [])
end
defp decode_value(<<count::32-signed, data::bits>>, {:map, [key_type, value_type]}) do
decode_value_map_key(data, count, key_type, value_type, [])
end
defp decode_value(<<count::32-signed, data::bits>>, {:set, [type]}) do
data
|> decode_value_list(count, type, [])
|> MapSet.new()
end
defp decode_value(<<value::bits>>, :ascii), do: value
defp decode_value(<<value::bits>>, :blob), do: value
defp decode_value(<<value::bits>>, :varchar), do: value
# For legacy compatibility reasons, most non-string types support
# "empty" values (that is a value with zero length).
# An empty value is distinct from NULL, which is encoded with a negative length.
defp decode_value(<<>>, _type), do: nil
defp decode_value(<<data::bits>>, {:udt, fields}) do
decode_value_udt(data, fields, [])
end
defp decode_value(<<data::bits>>, {:tuple, types}) do
decode_value_tuple(data, types, [])
end
defp decode_value(<<data::bits>>, :varint) do
size = bit_size(data)
<<value::size(size)-signed>> = data
value
end
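# e.g. decode_value(<<0xFF>>, :varint) #=> -1, because the whole binary is
# read as a single signed integer of exactly its own bit width.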
defp decode_value_udt(<<>>, fields, acc) do
for {field_name, _} <- fields, into: Map.new(acc), do: {field_name, nil}
end
defp decode_value_udt(<<buffer::bits>>, [{field_name, [field_type]} | rest], acc) do
decode_value(value <- buffer, field_type) do
decode_value_udt(buffer, rest, [{field_name, value} | acc])
end
end
defp decode_value_list(<<>>, 0, _type, acc) do
Enum.reverse(acc)
end
defp decode_value_list(<<buffer::bits>>, count, type, acc) do
decode_value(item <- buffer, type) do
decode_value_list(buffer, count - 1, type, [item | acc])
end
end
defp decode_value_map_key(<<>>, 0, _key_type, _value_type, acc) do
Map.new(acc)
end
defp decode_value_map_key(<<buffer::bits>>, count, key_type, value_type, acc) do
decode_value(key <- buffer, key_type) do
decode_value_map_value(buffer, count, key_type, value_type, [key | acc])
end
end
defp decode_value_map_value(<<buffer::bits>>, count, key_type, value_type, [key | acc]) do
decode_value(value <- buffer, value_type) do
decode_value_map_key(buffer, count - 1, key_type, value_type, [{key, value} | acc])
end
end
defp decode_value_tuple(<<buffer::bits>>, [type | types], acc) do
decode_value(item <- buffer, type) do
decode_value_tuple(buffer, types, [item | acc])
end
end
defp decode_value_tuple(<<>>, [], acc) do
acc |> Enum.reverse() |> List.to_tuple()
end
defp decode_columns(<<buffer::bits>>, 0, _table_spec, _atom_keys?, acc) do
{Enum.reverse(acc), buffer}
end
defp decode_columns(<<buffer::bits>>, column_count, nil, atom_keys?, acc) do
decode_string(keyspace <- buffer)
decode_string(table <- buffer)
decode_string(name <- buffer)
name = if atom_keys?, do: String.to_atom(name), else: name
{type, buffer} = decode_type(buffer)
entry = {keyspace, table, name, type}
decode_columns(buffer, column_count - 1, nil, atom_keys?, [entry | acc])
end
defp decode_columns(<<buffer::bits>>, column_count, table_spec, atom_keys?, acc) do
{keyspace, table} = table_spec
decode_string(name <- buffer)
name = if atom_keys?, do: String.to_atom(name), else: name
{type, buffer} = decode_type(buffer)
entry = {keyspace, table, name, type}
decode_columns(buffer, column_count - 1, table_spec, atom_keys?, [entry | acc])
end
defp decode_type(<<0x0001::16, buffer::bits>>) do
{:ascii, buffer}
end
defp decode_type(<<0x0002::16, buffer::bits>>) do
{:bigint, buffer}
end
defp decode_type(<<0x0003::16, buffer::bits>>) do
{:blob, buffer}
end
defp decode_type(<<0x0004::16, buffer::bits>>) do
{:boolean, buffer}
end
defp decode_type(<<0x0005::16, buffer::bits>>) do
{:counter, buffer}
end
defp decode_type(<<0x0006::16, buffer::bits>>) do
{:decimal, buffer}
end
defp decode_type(<<0x0007::16, buffer::bits>>) do
{:double, buffer}
end
defp decode_type(<<0x0008::16, buffer::bits>>) do
{:float, buffer}
end
defp decode_type(<<0x0009::16, buffer::bits>>) do
{:int, buffer}
end
defp decode_type(<<0x000B::16, buffer::bits>>) do
{:timestamp, buffer}
end
defp decode_type(<<0x000C::16, buffer::bits>>) do
{:uuid, buffer}
end
defp decode_type(<<0x000D::16, buffer::bits>>) do
{:varchar, buffer}
end
defp decode_type(<<0x000E::16, buffer::bits>>) do
{:varint, buffer}
end
defp decode_type(<<0x000F::16, buffer::bits>>) do
{:timeuuid, buffer}
end
defp decode_type(<<0x0010::16, buffer::bits>>) do
{:inet, buffer}
end
defp decode_type(<<0x0011::16, buffer::bits>>) do
{:date, buffer}
end
defp decode_type(<<0x0012::16, buffer::bits>>) do
{:time, buffer}
end
defp decode_type(<<0x0013::16, buffer::bits>>) do
{:smallint, buffer}
end
defp decode_type(<<0x0014::16, buffer::bits>>) do
{:tinyint, buffer}
end
defp decode_type(<<0x0020::16, buffer::bits>>) do
{type, buffer} = decode_type(buffer)
{{:list, [type]}, buffer}
end
defp decode_type(<<0x0021::16, buffer::bits>>) do
{key_type, buffer} = decode_type(buffer)
{value_type, buffer} = decode_type(buffer)
{{:map, [key_type, value_type]}, buffer}
end
defp decode_type(<<0x0022::16, buffer::bits>>) do
{type, buffer} = decode_type(buffer)
{{:set, [type]}, buffer}
end
defp decode_type(<<0x0030::16, buffer::bits>>) do
decode_string(_keyspace <- buffer)
decode_string(_name <- buffer)
<<count::16, buffer::bits>> = buffer
decode_type_udt(buffer, count, [])
end
defp decode_type(<<0x0031::16, count::16, buffer::bits>>) do
decode_type_tuple(buffer, count, [])
end
defp decode_type_udt(<<buffer::bits>>, 0, acc) do
{{:udt, Enum.reverse(acc)}, buffer}
end
defp decode_type_udt(<<buffer::bits>>, count, acc) do
decode_string(field_name <- buffer)
{field_type, buffer} = decode_type(buffer)
decode_type_udt(buffer, count - 1, [{field_name, [field_type]} | acc])
end
defp decode_type_tuple(<<buffer::bits>>, 0, acc) do
{{:tuple, Enum.reverse(acc)}, buffer}
end
defp decode_type_tuple(<<buffer::bits>>, count, acc) do
{type, buffer} = decode_type(buffer)
decode_type_tuple(buffer, count - 1, [type | acc])
end
defp decode_string_multimap(<<count::16, buffer::bits>>) do
decode_string_multimap(buffer, count, [])
end
defp decode_string_multimap(<<buffer::bits>>, 0, acc) do
{Map.new(acc), buffer}
end
defp decode_string_multimap(<<buffer::bits>>, count, acc) do
decode_string(key <- buffer)
{value, buffer} = decode_string_list(buffer)
decode_string_multimap(buffer, count - 1, [{key, value} | acc])
end
defp decode_string_list(<<count::16, buffer::bits>>) do
decode_string_list(buffer, count, [])
end
defp decode_string_list(<<buffer::bits>>, 0, acc) do
{Enum.reverse(acc), buffer}
end
defp decode_string_list(<<buffer::bits>>, count, acc) do
decode_string(item <- buffer)
decode_string_list(buffer, count - 1, [item | acc])
end
defp date_from_unix_days(days) do
Date.add(~D[1970-01-01], days)
end
defp date_to_unix_days(date) do
Date.diff(date, ~D[1970-01-01])
end
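# e.g. date_to_unix_days(~D[2020-01-01]) #=> 18262; encode_value/2 then adds
# @unix_epoch_days so that the epoch sits at the midpoint of the unsigned
# 32-bit wire value.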
defp time_from_nanoseconds(nanoseconds) do
Time.add(~T[00:00:00], nanoseconds, :nanosecond)
end
defp time_to_nanoseconds(time) do
Time.diff(time, ~T[00:00:00.000000], :nanosecond)
end
end
# lib/xandra/protocol/v4.ex
defmodule AWS.DataBrew do
@moduledoc """
AWS Glue DataBrew is a visual, cloud-scale data-preparation service.
DataBrew simplifies data preparation tasks, targeting data issues that are hard
to spot and time-consuming to fix. DataBrew empowers users of all technical
levels to visualize the data and perform one-click data transformations, with no
coding required.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-07-25",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "databrew",
global?: false,
protocol: "rest-json",
service_id: "DataBrew",
signature_version: "v4",
signing_name: "databrew",
target_prefix: nil
}
end
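# A hedged usage sketch (placeholder credentials; AWS.Client.create/3 is the
# aws-elixir client constructor) for calling one of the operations below:
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, result, _http_response} = AWS.DataBrew.list_datasets(client)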
@doc """
Deletes one or more versions of a recipe at a time.
The entire request will be rejected if:
* The recipe does not exist.
* There is an invalid version identifier in the list of versions.
* The version list is empty.
* The version list size exceeds 50.
* The version list contains duplicate entries.
The request will complete successfully, but with partial failures, if:
* A version does not exist.
* A version is being used by a job.
* You specify `LATEST_WORKING`, but it's being used by a project.
* The version fails to be deleted.
The `LATEST_WORKING` version will only be deleted if the recipe has no other
versions. If you try to delete `LATEST_WORKING` while other versions exist (or
if they can't be deleted), then `LATEST_WORKING` will be listed as partial
failure in the response.
"""
def batch_delete_recipe_version(%Client{} = client, name, input, options \\ []) do
url_path = "/recipes/#{URI.encode(name)}/batchDeleteRecipeVersion"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new DataBrew dataset.
"""
def create_dataset(%Client{} = client, input, options \\ []) do
url_path = "/datasets"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new job to analyze a dataset and create its data profile.
"""
def create_profile_job(%Client{} = client, input, options \\ []) do
url_path = "/profileJobs"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new DataBrew project.
"""
def create_project(%Client{} = client, input, options \\ []) do
url_path = "/projects"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new DataBrew recipe.
"""
def create_recipe(%Client{} = client, input, options \\ []) do
url_path = "/recipes"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new job to transform input data, using steps defined in an existing
AWS Glue DataBrew recipe.
"""
def create_recipe_job(%Client{} = client, input, options \\ []) do
url_path = "/recipeJobs"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new schedule for one or more DataBrew jobs.
Jobs can be run at a specific date and time, or at regular intervals.
"""
def create_schedule(%Client{} = client, input, options \\ []) do
url_path = "/schedules"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a dataset from DataBrew.
"""
def delete_dataset(%Client{} = client, name, input, options \\ []) do
url_path = "/datasets/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes the specified DataBrew job.
"""
def delete_job(%Client{} = client, name, input, options \\ []) do
url_path = "/jobs/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes an existing DataBrew project.
"""
def delete_project(%Client{} = client, name, input, options \\ []) do
url_path = "/projects/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a single version of a DataBrew recipe.
"""
def delete_recipe_version(%Client{} = client, name, recipe_version, input, options \\ []) do
url_path = "/recipes/#{URI.encode(name)}/recipeVersion/#{URI.encode(recipe_version)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes the specified DataBrew schedule.
"""
def delete_schedule(%Client{} = client, name, input, options \\ []) do
url_path = "/schedules/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns the definition of a specific DataBrew dataset.
"""
def describe_dataset(%Client{} = client, name, options \\ []) do
url_path = "/datasets/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the definition of a specific DataBrew job.
"""
def describe_job(%Client{} = client, name, options \\ []) do
url_path = "/jobs/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Represents one run of a DataBrew job.
"""
def describe_job_run(%Client{} = client, name, run_id, options \\ []) do
url_path = "/jobs/#{URI.encode(name)}/jobRun/#{URI.encode(run_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the definition of a specific DataBrew project.
"""
def describe_project(%Client{} = client, name, options \\ []) do
url_path = "/projects/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the definition of a specific DataBrew recipe corresponding to a
particular version.
"""
def describe_recipe(%Client{} = client, name, recipe_version \\ nil, options \\ []) do
url_path = "/recipes/#{URI.encode(name)}"
headers = []
query_params = []
query_params =
if !is_nil(recipe_version) do
[{"recipeVersion", recipe_version} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the definition of a specific DataBrew schedule.
"""
def describe_schedule(%Client{} = client, name, options \\ []) do
url_path = "/schedules/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the DataBrew datasets.
"""
def list_datasets(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/datasets"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the previous runs of a particular DataBrew job.
"""
def list_job_runs(
%Client{} = client,
name,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/jobs/#{URI.encode(name)}/jobRuns"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the DataBrew jobs that are defined.
"""
def list_jobs(
%Client{} = client,
dataset_name \\ nil,
max_results \\ nil,
next_token \\ nil,
project_name \\ nil,
options \\ []
) do
url_path = "/jobs"
headers = []
query_params = []
query_params =
if !is_nil(project_name) do
[{"projectName", project_name} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(dataset_name) do
[{"datasetName", dataset_name} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the DataBrew projects that are defined.
"""
def list_projects(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/projects"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the versions of a particular DataBrew recipe, except for `LATEST_WORKING`.
"""
def list_recipe_versions(
%Client{} = client,
max_results \\ nil,
name,
next_token \\ nil,
options \\ []
) do
url_path = "/recipeVersions"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(name) do
[{"name", name} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the DataBrew recipes that are defined.
"""
def list_recipes(
%Client{} = client,
max_results \\ nil,
next_token \\ nil,
recipe_version \\ nil,
options \\ []
) do
url_path = "/recipes"
headers = []
query_params = []
query_params =
if !is_nil(recipe_version) do
[{"recipeVersion", recipe_version} | query_params]
else
query_params
end
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the DataBrew schedules that are defined.
"""
def list_schedules(
%Client{} = client,
job_name \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/schedules"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(job_name) do
[{"jobName", job_name} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all the tags for a DataBrew resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Publishes a new version of a DataBrew recipe.
"""
def publish_recipe(%Client{} = client, name, input, options \\ []) do
url_path = "/recipes/#{URI.encode(name)}/publishRecipe"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Performs a recipe step within an interactive DataBrew session that's currently
open.
"""
def send_project_session_action(%Client{} = client, name, input, options \\ []) do
url_path = "/projects/#{URI.encode(name)}/sendProjectSessionAction"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Runs a DataBrew job.
"""
def start_job_run(%Client{} = client, name, input, options \\ []) do
url_path = "/jobs/#{URI.encode(name)}/startJobRun"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates an interactive session, enabling you to manipulate data in a DataBrew
project.
"""
def start_project_session(%Client{} = client, name, input, options \\ []) do
url_path = "/projects/#{URI.encode(name)}/startProjectSession"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Stops a particular run of a job.
"""
def stop_job_run(%Client{} = client, name, run_id, input, options \\ []) do
url_path = "/jobs/#{URI.encode(name)}/jobRun/#{URI.encode(run_id)}/stopJobRun"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Adds metadata tags to a DataBrew resource, such as a dataset, project, recipe,
job, or schedule.
"""
def tag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Removes metadata tags from a DataBrew resource.
"""
def untag_resource(%Client{} = client, resource_arn, input, options \\ []) do
url_path = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_params, input} =
[
{"TagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of an existing DataBrew dataset.
"""
def update_dataset(%Client{} = client, name, input, options \\ []) do
url_path = "/datasets/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of an existing profile job.
"""
def update_profile_job(%Client{} = client, name, input, options \\ []) do
url_path = "/profileJobs/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of an existing DataBrew project.
"""
def update_project(%Client{} = client, name, input, options \\ []) do
url_path = "/projects/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of the `LATEST_WORKING` version of a DataBrew recipe.
"""
def update_recipe(%Client{} = client, name, input, options \\ []) do
url_path = "/recipes/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of an existing DataBrew recipe job.
"""
def update_recipe_job(%Client{} = client, name, input, options \\ []) do
url_path = "/recipeJobs/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Modifies the definition of an existing DataBrew schedule.
"""
def update_schedule(%Client{} = client, name, input, options \\ []) do
url_path = "/schedules/#{URI.encode(name)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
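# A minimal usage sketch for this generated client (assuming the enclosing
# module is AWS.DataBrew; the client construction below is illustrative):
#
#     client = %AWS.Client{access_key_id: "...", secret_access_key: "...", region: "us-east-1"}
#     {:ok, body, _http_response} = AWS.DataBrew.list_recipes(client, _max_results = 25)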
|
lib/aws/generated/data_brew.ex
| 0.77437
| 0.480235
|
data_brew.ex
|
starcoder
|
defmodule Tty2048.Grid do
@sides [:up, :down, :right, :left]
def new(size) when size > 0 do
make_grid(size)
|> seed |> seed
end
def move(grid, side)
when is_list(grid) and side in @sides do
case try_move(grid, side) do
:noop -> {grid, 0}
{:ok, grid, points} ->
{seed(grid), points}
end
end
def has_move?(grid) do
Enum.any?(@sides, &(try_move(grid, &1) != :noop))
end
defp try_move(grid, side) do
case do_move(grid, side) do
{^grid, _} -> :noop
{grid, points} ->
{:ok, grid, points}
end
end
defp do_move(grid, :left) do
collapse(grid)
|> compose(&Enum.reverse(&1, &2))
end
defp do_move(grid, :right) do
Enum.map(grid, &Enum.reverse/1)
|> collapse
|> compose(&(&2 ++ &1))
end
defp do_move(grid, :up) do
transpose(grid)
|> do_move(:left)
|> transpose
end
defp do_move(grid, :down) do
transpose(grid)
|> do_move(:right)
|> transpose
end
defp make_grid(size) do
for _ <- 1..size, do: make_row(size)
end
defp make_row(size) do
for _ <- 1..size, do: 0
end
defp compose(chunks, fun) do
Enum.map_reduce chunks, 0, fn
{acc, tail, points}, sum -> {fun.(acc, tail), sum + points}
end
end
defp transpose({grid, points}),
do: {transpose(grid), points}
  # transpose/1 must also accept the {grid, points} tuple matched above, so
  # the default argument is written as an explicit clause to avoid a
  # defaults conflict between transpose/1 and transpose/2.
  defp transpose(grid), do: transpose(grid, [])
defp transpose([[] | _], acc),
do: Enum.reverse(acc)
defp transpose(grid, acc) do
{tail, row} = Enum.map_reduce(grid, [], fn
[el | rest], row -> {rest, [el | row]}
end)
transpose(tail, [Enum.reverse(row) | acc])
end
defp collapse(grid) do
Stream.map(grid, &collapse(&1, [], []))
end
defp collapse([], acc, tail) do
Enum.reverse(acc)
|> merge([], tail, 0)
end
defp collapse([0 | rest], acc, tail) do
collapse(rest, acc, [0 | tail])
end
defp collapse([el | rest], acc, tail) do
collapse(rest, [el | acc], tail)
end
defp merge([], acc, tail, points),
do: {acc, tail, points}
defp merge([el, el | rest], acc, tail, points) do
sum = el + el
merge(rest, [sum | acc], [0 | tail], points + sum)
end
defp merge([el | rest], acc, tail, points) do
merge(rest, [el | acc], tail, points)
end
defp seed(grid) do
    # :rand replaces the deprecated :random module (removed in recent OTP).
    seed(if(:rand.uniform() < 0.9, do: 2, else: 4), grid)
end
defp seed(num, grid) do
take_empties(grid)
|> sample
|> insert_at(num, grid)
end
defp sample({count, empties}) do
    Enum.at(empties, :rand.uniform(count) - 1)
end
defp insert_at({row_index, index}, num, grid) do
List.update_at(grid, row_index, &List.replace_at(&1, index, num))
end
defp take_empties(grid) do
Stream.with_index(grid)
|> Enum.reduce({0, []}, &take_empties/2)
end
defp take_empties({row, row_index}, acc) do
Stream.with_index(row) |> Enum.reduce(acc, fn
{0, index}, {count, empties} ->
{count + 1, [{row_index, index} | empties]}
_cell, acc -> acc
end)
end
end
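# A sketch of the typical game loop built on the public API above (the loop
# itself is illustrative):
#
#     grid = Tty2048.Grid.new(4)
#     {grid, points} = Tty2048.Grid.move(grid, :left)
#     Tty2048.Grid.has_move?(grid) #=> false once no direction changes the grid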
|
lib/tty2048/grid.ex
| 0.560734
| 0.734834
|
grid.ex
|
starcoder
|
if Code.ensure_loaded?(Oban) do
defmodule PromEx.Plugins.Oban do
@moduledoc """
This plugin captures metrics emitted by Oban. Specifically, it captures metrics from job events, producer events,
and also from internal polling jobs to monitor queue sizes.
This plugin supports the following options:
- `oban_supervisors`: This is an OPTIONAL option and it allows you to specify what Oban instances should have their events
tracked. By default the only Oban instance that will have its events tracked is the default `Oban` instance. As a result, by
default this option has a value of `[Oban]`. If you would like to track other named Oban instances, or perhaps your default
and only Oban instance has a different name, you can pass in your own list of Oban instances (e.g. `[Oban, Oban.PrivateJobs]`).
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
`[otp_app, :prom_ex, :oban]`. If this changes you will also want to set `oban_metric_prefix`
in your `dashboard_assigns` to the snakecase version of your prefix, the default
`oban_metric_prefix` is `{otp_app}_prom_ex_oban`.
- `poll_rate`: This option is OPTIONAL and is the rate at which poll metrics are refreshed (default is 5 seconds).
This plugin exposes the following metric groups:
- `:oban_init_event_metrics`
- `:oban_job_event_metrics`
- `:oban_producer_event_metrics`
- `:oban_circuit_event_metrics`
- `:oban_queue_poll_metrics`
To use this plugin in your application, add the following to your PromEx module:
```
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{PromEx.Plugins.Oban, oban_supervisors: [Oban, Oban.AnotherSupervisor], poll_rate: 10_000}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "oban.json"}
]
end
end
```
"""
use PromEx.Plugin
import Ecto.Query, only: [group_by: 3, select: 3]
# Oban events
@init_event [:oban, :supervisor, :init]
@job_complete_event [:oban, :job, :stop]
@job_exception_event [:oban, :job, :exception]
@producer_complete_event [:oban, :producer, :stop]
@producer_exception_event [:oban, :producer, :exception]
@circuit_breaker_trip_event [:oban, :circuit, :trip]
@circuit_breaker_open_event [:oban, :circuit, :open]
# PromEx Oban proxy events
@init_event_queue_limit_proxy [:prom_ex, :oban, :queue, :limit, :proxy]
@impl true
def event_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :oban))
oban_supervisors = get_oban_supervisors(opts)
keep_function_filter = keep_oban_instance_metrics(oban_supervisors)
# Set up event proxies
set_up_init_proxy_event(metric_prefix)
[
oban_supervisor_init_event_metrics(metric_prefix, keep_function_filter),
oban_job_event_metrics(metric_prefix, keep_function_filter),
oban_producer_event_metrics(metric_prefix, keep_function_filter),
oban_circuit_breaker_event_metrics(metric_prefix, keep_function_filter)
]
end
@impl true
def polling_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :oban))
poll_rate = Keyword.get(opts, :poll_rate, 5_000)
oban_supervisors = get_oban_supervisors(opts)
# Queue length details
Polling.build(
:oban_queue_poll_metrics,
poll_rate,
{__MODULE__, :execute_queue_metrics, [oban_supervisors]},
[
last_value(
metric_prefix ++ [:queue, :length, :count],
event_name: [:prom_ex, :plugin, :oban, :queue, :length, :count],
description: "The total number jobs that are in the queue in the designated state",
measurement: :count,
tags: [:name, :queue, :state]
)
]
)
end
@doc false
def execute_queue_metrics(oban_supervisors) do
oban_supervisors
|> Enum.each(fn oban_supervisor ->
oban_supervisor
|> Oban.Registry.whereis()
|> case do
oban_pid when is_pid(oban_pid) ->
config = Oban.Registry.config(oban_supervisor)
handle_oban_queue_polling_metrics(oban_supervisor, config)
_ ->
:skip
end
end)
end
def oban_circuit_breaker_event_metrics(metric_prefix, keep_function_filter) do
Event.build(
:oban_circuit_breaker_event_metrics,
[
counter(
metric_prefix ++ [:circuit, :trip, :total],
event_name: @circuit_breaker_trip_event,
description: "The number of circuit breaker events that have occurred",
tag_values: &circuit_breaker_trip_tag_values/1,
tags: [:name, :circuit_breaker],
keep: keep_function_filter
),
counter(
metric_prefix ++ [:circuit, :open, :total],
event_name: @circuit_breaker_open_event,
description: "The number of circuit open events that have occurred.",
tag_values: &circuit_breaker_trip_tag_values/1,
tags: [:name, :circuit_breaker],
keep: keep_function_filter
)
]
)
end
@doc false
def handle_proxy_init_event(_event_name, _event_measurement, event_metadata, _config) do
Enum.each(event_metadata.conf.queues, fn {queue, queue_opts} ->
limit = Keyword.get(queue_opts, :limit, 0)
metadata = %{
queue: queue,
name: event_metadata.conf.name
}
:telemetry.execute(@init_event_queue_limit_proxy, %{limit: limit}, metadata)
end)
end
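  # For an instance configured with `queues: [default: 10]`, the proxy above
  # re-emits one event per queue, roughly (values illustrative):
  #
  #     :telemetry.execute(
  #       [:prom_ex, :oban, :queue, :limit, :proxy],
  #       %{limit: 10},
  #       %{queue: :default, name: Oban}
  #     )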
defp circuit_breaker_trip_tag_values(%{name: name, config: conf}) do
%{
name: normalize_module_name(conf.name),
circuit_breaker: normalize_module_name(name)
}
end
defp circuit_breaker_trip_tag_values(%{name: name, conf: conf}) do
%{
name: normalize_module_name(conf.name),
circuit_breaker: normalize_module_name(name)
}
end
defp oban_job_event_metrics(metric_prefix, keep_function_filter) do
job_attempt_buckets = [1, 2, 3, 5, 10]
job_duration_buckets = [1, 50, 100, 250, 500, 1_000, 5_000, 10_000]
Event.build(
:oban_job_event_metrics,
[
distribution(
metric_prefix ++ [:job, :processing, :duration, :milliseconds],
event_name: @job_complete_event,
measurement: :duration,
description: "The amount of time it takes to processes an Oban job.",
reporter_options: [
buckets: job_duration_buckets
],
tag_values: &job_complete_tag_values/1,
tags: [:name, :queue, :state, :worker],
unit: {:native, :millisecond},
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:job, :queue, :time, :milliseconds],
event_name: @job_complete_event,
measurement: :queue_time,
description: "The amount of time that the Oban job was waiting in queue for processing.",
reporter_options: [
buckets: job_duration_buckets
],
tag_values: &job_complete_tag_values/1,
tags: [:name, :queue, :state, :worker],
unit: {:native, :millisecond},
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:job, :complete, :attempts],
event_name: @job_complete_event,
measurement: fn _measurement, %{attempt: attempt} ->
attempt
end,
description: "The number of times a job was attempted prior to completing.",
reporter_options: [
buckets: job_attempt_buckets
],
tag_values: &job_complete_tag_values/1,
tags: [:name, :queue, :state, :worker],
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:job, :exception, :duration, :milliseconds],
event_name: @job_exception_event,
measurement: :duration,
description: "The amount of time it took to process a job the encountered an exception.",
reporter_options: [
buckets: job_duration_buckets
],
tag_values: &job_exception_tag_values/1,
tags: [:name, :queue, :state, :worker, :kind, :error],
unit: {:native, :millisecond},
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:job, :exception, :queue, :time, :milliseconds],
event_name: @job_exception_event,
measurement: :queue_time,
description:
"The amount of time that the Oban job was waiting in queue for processing prior to resulting in an exception.",
reporter_options: [
buckets: job_duration_buckets
],
tag_values: &job_exception_tag_values/1,
tags: [:name, :queue, :state, :worker, :kind, :error],
unit: {:native, :millisecond},
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:job, :exception, :attempts],
event_name: @job_exception_event,
measurement: fn _measurement, %{attempt: attempt} ->
attempt
end,
description: "The number of times a job was attempted prior to throwing an exception.",
reporter_options: [
buckets: job_attempt_buckets
],
tag_values: &job_exception_tag_values/1,
tags: [:name, :queue, :state, :worker],
keep: keep_function_filter
)
]
)
end
defp oban_producer_event_metrics(metric_prefix, keep_function_filter) do
Event.build(
:oban_producer_event_metrics,
[
distribution(
metric_prefix ++ [:producer, :duration, :milliseconds],
event_name: @producer_complete_event,
measurement: :duration,
description: "How long it took to dispatch the job.",
reporter_options: [
buckets: [1, 50, 100, 250, 500, 1_000, 5_000, 10_000]
],
unit: {:native, :millisecond},
tag_values: &producer_tag_values/1,
tags: [:queue, :name],
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:producer, :dispatched, :count],
event_name: @producer_complete_event,
measurement: fn _measurement, %{dispatched_count: count} ->
count
end,
description: "The number of jobs that were dispatched.",
reporter_options: [
buckets: [5, 10, 25, 50, 100]
],
tag_values: &producer_tag_values/1,
tags: [:queue, :name],
keep: keep_function_filter
),
distribution(
metric_prefix ++ [:producer, :exception, :duration, :milliseconds],
event_name: @producer_exception_event,
measurement: :duration,
description: "How long it took for the producer to raise an exception.",
reporter_options: [
buckets: [1, 50, 100, 250, 500, 1_000, 5_000, 10_000]
],
unit: {:native, :millisecond},
tag_values: &producer_tag_values/1,
tags: [:queue, :name],
keep: keep_function_filter
)
]
)
end
defp job_complete_tag_values(metadata) do
config =
case metadata do
%{config: config} ->
config
%{conf: config} ->
config
end
%{
name: normalize_module_name(config.name),
queue: metadata.job.queue,
state: metadata.state,
worker: metadata.worker
}
end
defp job_exception_tag_values(metadata) do
error =
case metadata.error do
%error_type{} -> normalize_module_name(error_type)
_ -> "Undefined"
end
config =
case metadata do
%{config: config} ->
config
%{conf: config} ->
config
end
%{
name: normalize_module_name(config.name),
queue: metadata.job.queue,
state: metadata.state,
worker: metadata.worker,
kind: metadata.kind,
error: error
}
end
defp producer_tag_values(metadata) do
%{
queue: metadata.queue,
name: normalize_module_name(metadata.conf.name)
}
end
defp oban_supervisor_init_event_metrics(metric_prefix, keep_function_filter) do
Event.build(
:oban_init_event_metrics,
[
last_value(
metric_prefix ++ [:init, :status, :info],
event_name: @init_event,
description: "Information regarding the initialized oban supervisor.",
measurement: fn _measurements -> 1 end,
tags: [:name, :node, :plugins, :prefix, :queues, :repo],
tag_values: &oban_init_tag_values/1,
keep: keep_function_filter
),
last_value(
metric_prefix ++ [:init, :circuit, :backoff, :milliseconds],
event_name: @init_event,
description: "The Oban supervisor's circuit backoff value.",
measurement: fn _measurements, %{conf: config} ->
config.circuit_backoff
end,
tags: [:name],
tag_values: &oban_init_tag_values/1,
keep: keep_function_filter
),
last_value(
metric_prefix ++ [:init, :shutdown, :grace, :period, :milliseconds],
event_name: @init_event,
description: "The Oban supervisor's shutdown grace period value.",
measurement: fn _measurements, %{conf: config} ->
config.shutdown_grace_period
end,
tags: [:name],
tag_values: &oban_init_tag_values/1,
keep: keep_function_filter
),
last_value(
metric_prefix ++ [:init, :dispatch, :cooldown, :milliseconds],
event_name: @init_event,
description: "The Oban supervisor's dispatch cooldown value.",
measurement: fn _measurements, %{conf: config} ->
config.dispatch_cooldown
end,
tags: [:name],
tag_values: &oban_init_tag_values/1,
keep: keep_function_filter
),
last_value(
metric_prefix ++ [:init, :queue, :concurrency, :limit],
event_name: @init_event_queue_limit_proxy,
description: "The concurrency limits of each of the Oban queue.",
measurement: :limit,
tags: [:name, :queue],
tag_values: &oban_init_queues_tag_values/1,
keep: keep_function_filter
)
]
)
end
defp handle_oban_queue_polling_metrics(oban_supervisor, config) do
query =
Oban.Job
|> group_by([j], [j.queue, j.state])
|> select([j], {j.queue, j.state, count(j.id)})
config
|> Oban.Repo.all(query)
|> Enum.each(fn {queue, state, count} ->
measurements = %{count: count}
metadata = %{name: normalize_module_name(oban_supervisor), queue: queue, state: state}
:telemetry.execute([:prom_ex, :plugin, :oban, :queue, :length, :count], measurements, metadata)
end)
end
defp get_oban_supervisors(opts) do
opts
|> Keyword.get(:oban_supervisors, [Oban])
|> case do
supervisors when is_list(supervisors) ->
MapSet.new(supervisors)
_ ->
raise "Invalid :oban_supervisors option value."
end
end
defp keep_oban_instance_metrics(oban_supervisors) do
fn
%{conf: %{name: name}} ->
MapSet.member?(oban_supervisors, name)
%{name: name} ->
MapSet.member?(oban_supervisors, name)
_ ->
false
end
end
defp oban_init_tag_values(%{conf: config}) do
plugins_string_list =
config.plugins
|> Enum.map(fn plugin ->
normalize_module_name(plugin)
end)
|> Enum.join(", ")
queues_string_list =
config.queues
|> Enum.map(fn {queue, _queue_opts} ->
Atom.to_string(queue)
end)
|> Enum.join(", ")
%{
name: normalize_module_name(config.name),
node: config.node,
plugins: plugins_string_list,
prefix: config.prefix,
queues: queues_string_list,
repo: normalize_module_name(config.repo)
}
end
defp oban_init_queues_tag_values(%{name: name, queue: queue}) do
%{
name: normalize_module_name(name),
queue: queue
}
end
defp set_up_init_proxy_event(prefix) do
:telemetry.attach(
[:prom_ex, :oban, :proxy] ++ prefix,
@init_event,
&__MODULE__.handle_proxy_init_event/4,
%{}
)
end
defp normalize_module_name(name) when is_atom(name) do
name
|> Atom.to_string()
|> String.trim_leading("Elixir.")
end
defp normalize_module_name({name, _options}), do: normalize_module_name(name)
defp normalize_module_name(name), do: name
end
else
defmodule PromEx.Plugins.Oban do
@moduledoc false
use PromEx.Plugin
@impl true
def event_metrics(_opts) do
PromEx.Plugin.no_dep_raise(__MODULE__, "Oban")
end
end
end
|
lib/prom_ex/plugins/oban.ex
| 0.877313
| 0.614914
|
oban.ex
|
starcoder
|
defmodule VintageNetMobile do
@behaviour VintageNet.Technology
alias VintageNet.Interface.RawConfig
@moduledoc """
Use cellular modems with VintageNet
This module is not intended to be called directly but via calls to `VintageNet`. Here's a
typical example:
```elixir
VintageNet.configure(
"ppp0",
%{
type: VintageNetMobile,
vintage_net_mobile: %{
modem: VintageNetMobile.Modem.QuectelBG96,
service_providers: [%{apn: "super"}]
}
}
)
```
The `:modem` key should be set to your modem implementation. Cellular modems
tend to be very similar. If `vintage_net_mobile` doesn't support your modem, see
the customizing section. It may just be a copy/paste away. See your modem
module for modem-specific options. The following keys are supported by all modems:
* `:service_providers` - This is a list of service provider information
* `:chatscript_additions` - This is a string (technically iodata) for custom
modem initialization.
The `:service_providers` key should be set to information provided by each of
your service providers. It is common that this is a list of one item.
Circumstances may require you to list more than one, though. Additionally, modem
implementations may require more or less information depending on their
implementation. (It's possible to hard-code the service provider in the modem
implementation. In that case, this key isn't used and should be set to an empty
list. This is useful when your cellular modem provides instructions that
magically work and the AT commands that they give are confusing.)
Information for each service provider is a map with some or all of the following
fields:
* `:apn` (required) - e.g., `"access_point_name"`
* `:usage` (optional) - `:eps_bearer` (LTE) or `:pdp` (UMTS/GPRS)
Your service provider should provide you with the information that you need to
connect. Often it is just an APN. The Gnome project provides a database of
[service provider
information](https://wiki.gnome.org/Projects/NetworkManager/MobileBroadband/ServiceProviders)
that may also be useful.
Here's an example with a service provider list:
```elixir
%{
  type: VintageNetMobile,
  vintage_net_mobile: %{
    modem: your_modem,
service_providers: [
%{apn: "wireless.twilio.com"}
],
chatscript_additions: "OK AT"
}
}
```
## Custom modems
`VintageNetMobile` allows you add custom modem implementations if the built-in
ones don't work for you. See the `VintageNetMobile.Modem` behaviour.
In order to implement a modem, you will need:
1. Instructions for connecting to the modem from Linux. Sometimes this
involves `usb_modeswitch` or knowing which serial ports the modem exposes.
2. Example chat scripts. These are lists of `AT` commands and their expected
responses for configuring the service provider and entering `PPP` mode.
3. (Optional) Instructions for checking the signal strength when connected.
One strategy is to see if there's an existing modem that looks similar to yours
and modify it.
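A skeletal custom modem might look like the sketch below. The callback names
mirror how this module drives modems (`normalize/1` and `add_raw_config/3`);
the body is purely illustrative:
```elixir
defmodule MyApp.Modem.Example do
  @behaviour VintageNetMobile.Modem

  @impl true
  def normalize(config), do: config

  @impl true
  def add_raw_config(raw_config, _config, _opts) do
    # Add chatscript files, pppd child specs, etc. here.
    raw_config
  end
end
```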
"""
@typedoc """
Information about a service provider
* `:apn` (required) - e.g., `"access_point_name"`
* `:usage` (optional) - `:eps_bearer` (LTE) or `:pdp` (UMTS/GPRS)
"""
@type service_provider_info :: %{
required(:apn) => String.t(),
optional(:usage) => :eps_bearer | :pdp
}
@typedoc """
The `:vintage_net_mobile` option in the configuration map
Only the `:service_providers` key must be specified. Modems may
add keys of their own.
"""
@type mobile_options :: %{
required(:service_providers) => [service_provider_info()],
optional(:chatscript_additions) => iodata(),
optional(any) => any
}
@typedoc """
Radio Access Technology (RAT)
These define how to connect to the cellular network.
"""
@type rat :: :gsm | :td_scdma | :wcdma | :lte | :cdma | :lte_cat_nb1 | :lte_cat_m1
@impl true
def normalize(%{type: __MODULE__, vintage_net_mobile: mobile} = config) do
modem = Map.fetch!(mobile, :modem)
modem.normalize(config)
end
@impl true
def to_raw_config(ifname, %{type: __MODULE__, vintage_net_mobile: mobile} = config, opts) do
modem = Map.fetch!(mobile, :modem)
%RawConfig{
ifname: ifname,
type: __MODULE__,
source_config: config,
required_ifnames: [ppp_to_wwan(ifname)]
}
|> modem.add_raw_config(config, opts)
|> add_start_commands(modem)
|> add_cleanup_command()
end
@impl true
def ioctl(_ifname, _command, _args), do: {:error, :unsupported}
@impl true
def check_system(_), do: {:error, "unimplemented"}
defp add_start_commands(raw_config, _modem) do
# The mknod creates `/dev/ppp` if it doesn't exist.
new_up_cmds = [
{:run_ignore_errors, "mknod", ["/dev/ppp", "c", "108", "0"]} | raw_config.up_cmds
]
%RawConfig{raw_config | up_cmds: new_up_cmds}
end
defp add_cleanup_command(raw_config) do
cmds = [
{:fun, VintageNet.PropertyTable, :clear_prefix,
[VintageNet, ["interface", raw_config.ifname, "mobile"]]}
| raw_config.down_cmds
]
%RawConfig{raw_config | down_cmds: cmds}
end
defp ppp_to_wwan("ppp" <> index), do: "wwan" <> index
defp ppp_to_wwan(something_else), do: something_else
end
|
lib/vintage_net_mobile.ex
| 0.846546
| 0.722772
|
vintage_net_mobile.ex
|
starcoder
|
defmodule ExVault do
@moduledoc """
TODO: Proper documentation for ExVault.
"""
alias ExVault.Response
@middleware [
Tesla.Middleware.JSON
]
@adapter Tesla.Adapter.Hackney
@typedoc """
Options:
* `:address` - Address of the Vault server to talk to. (Required)
* `:token` - The Vault token to authenticate with. If not provided, any
calls requiring authentication will fail.
* `:adapter` - The Tesla adapter to use. Defaults to
`Tesla.Adapter.Hackney`, which has the best out-of-the-box TLS support.
Don't change this without a specific reason.
"""
@type option() ::
{:address, String.t()}
| {:token, String.t()}
| {:adapter, Tesla.Client.adapter()}
@type client :: Tesla.Client.t()
@type body :: Tesla.Env.body()
@type response :: Response.t()
@doc """
Create a new `ExVault` client.
"""
@spec new([option()]) :: client()
def new(opts) do
middleware =
[
{Tesla.Middleware.BaseUrl, opts[:address]},
{Tesla.Middleware.Headers, [{"X-Vault-Token", opts[:token]}]}
] ++ @middleware
adapter = opts[:adapter] || @adapter
Tesla.client(middleware, adapter)
end
@doc """
Perform a 'read' operation.
Params:
* `client` must be an `t:ExVault.client/0` value, as constructed by
`ExVault.new/1`.
* `path` is the full path of the entity to read.
* `opts` is currently unused. FIXME
"""
@spec read(client(), String.t(), keyword()) :: Response.t()
def read(client, path, opts \\ []) do
client
|> Tesla.get("/v1/#{path}", opts)
|> Response.parse_response()
end
@doc """
Perform a 'write' operation.
Params:
* `client` must be an `t:ExVault.client/0` value, as constructed by
`ExVault.new/1`.
* `path` is the full path of the entity to write.
* `params` should be the data to be written, usually in the form of an
Elixir map.
"""
@spec write(client(), String.t(), body()) :: Response.t()
def write(client, path, params) do
client
|> Tesla.post("/v1/#{path}", params)
|> Response.parse_response()
end
@doc """
Perform a 'delete' operation.
Params:
* `client` must be an `t:ExVault.client/0` value, as constructed by
`ExVault.new/1`.
* `path` is the full path of the entity to delete.
"""
@spec delete(client(), String.t()) :: Response.t()
def delete(client, path) do
client
|> Tesla.request(url: "/v1/#{path}", method: "DELETE")
|> Response.parse_response()
end
@doc """
Perform a 'list' operation.
Params:
* `client` must be an `t:ExVault.client/0` value, as constructed by
`ExVault.new/1`.
* `path` is the full path of the entity to list.
"""
@spec list(client(), String.t()) :: Response.t()
def list(client, path) do
client
|> Tesla.request(url: "/v1/#{path}", method: "LIST")
|> Response.parse_response()
end
end
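# Example wiring (address, token, and paths are illustrative; the
# "secret/data/..." layout is a Vault KV v2 convention, not part of this module):
#
#     client = ExVault.new(address: "http://127.0.0.1:8200", token: "s.example")
#     ExVault.write(client, "secret/data/myapp", %{"data" => %{"key" => "value"}})
#     ExVault.read(client, "secret/data/myapp")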
|
lib/exvault.ex
| 0.607197
| 0.425038
|
exvault.ex
|
starcoder
|
defmodule XDR.Type.VariableArray do
@moduledoc """
A variable-length array of some other type
"""
alias XDR.Size
defstruct type_name: "VariableArray", data_type: nil, max_length: Size.max(), values: []
@type t() :: %__MODULE__{
type_name: String.t(),
data_type: XDR.Type.t(),
max_length: XDR.Size.t(),
values: list(XDR.Type.t())
}
@type options() :: [type: XDR.Type.key(), max_length: XDR.Size.t()]
defimpl XDR.Type do
alias XDR.Error
def build_type(type, type: data_type, max_length: max_length) when is_integer(max_length) do
%{type | data_type: data_type, max_length: max_length}
end
def build_type(type, options) do
build_type(type, Keyword.merge(options, max_length: Size.max()))
end
def resolve_type!(%{data_type: data_type} = type, %{} = custom_types) do
%{type | data_type: XDR.Type.resolve_type!(data_type, custom_types)}
end
def build_value!(
%{data_type: data_type, max_length: max_length, type_name: name} = type,
raw_values
)
when is_list(raw_values) do
if length(raw_values) > max_length do
raise Error,
message: "Input values too long, expected a max of #{max_length} values",
type: name,
data: raw_values
end
values =
raw_values
|> Enum.with_index()
|> Enum.map(fn {value, index} ->
Error.wrap_call(:build_value!, [data_type, value], index)
end)
%{type | values: values}
end
def extract_value!(%{values: values}) do
values
|> Enum.with_index()
|> Enum.map(fn {type_with_value, index} ->
Error.wrap_call(:extract_value!, [type_with_value], index)
end)
end
def encode!(%{values: values}) do
encoded_length = Size.encode(length(values))
encoded_values =
values
|> Enum.with_index()
|> Enum.map(fn {type_with_value, index} ->
Error.wrap_call(:encode!, [type_with_value], index)
end)
|> Enum.join()
encoded_length <> encoded_values
end
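    # Wire-format sketch: encode!/1 emits the element count followed by each
    # element's encoding, e.g. a two-element array becomes
    # <<2::32, elem1::binary, elem2::binary>> (assuming Size encodes the
    # customary 4-byte big-endian XDR length).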
    def decode!(%{data_type: data_type} = type, full_encoding) do
      {length, encoding} = Size.decode!(full_encoding)
      # Guard the empty case: 0..(0 - 1) is a decreasing range in Elixir and
      # would otherwise iterate over indexes 0 and -1.
      {reversed_values, rest} =
        if length == 0 do
          {[], encoding}
        else
          Enum.reduce(0..(length - 1), {[], encoding}, fn index, {vals, prev_rest} ->
            {current_value, next_rest} =
              Error.wrap_call(XDR.Type, :decode!, [data_type, prev_rest], index)
            {[current_value | vals], next_rest}
          end)
        end
      {%{type | values: Enum.reverse(reversed_values)}, rest}
    end
end
end
|
lib/xdr/types/variable_array.ex
| 0.806967
| 0.486636
|
variable_array.ex
|
starcoder
|
defmodule HullSTL.Geometry do
alias Graphmath.Vec3, as: V
# dimensions in meters
@hboh 1.5 # half beam of hull at widest
@x_step_size 0.05
@x_steps trunc(@hboh / @x_step_size) # number of buttocks
@rib_spacing 20 # number of stations between transverse re-enforcement centers
@stringer_spacing 10 # number of buttocks between fore-and-aft re-enforcements
@stiffener_width 2 # a stiffener is a rib or a stringer
@stiffener_thickness 0.1 # total hull thickness at reenforcement point
@ordinary_thickness 0.01 # normal scantling thickness
@x_power 1.5 # profile is y = x^@x_power as a simple start
def section(station, z_step_size) do
0..@x_steps
|> Enum.map(fn i ->
thickness = max(_thickness(@stringer_spacing, i), _thickness(@rib_spacing, station))
x = (i * @x_step_size)
y = _calc_y(x)
normal = _normal(x)
offset = V.scale(normal, thickness)
outer = V.create(x, y, 0.0) |> V.add(_origin(station * z_step_size))
%{
station: station,
buttock: i,
normal: normal,
thickness: thickness,
outer: outer,
inner: V.add(outer, offset)
}
end)
end
def start_triangles(current) do
1..@x_steps
|> Enum.flat_map(fn i -> _start_triangle_faces(i, current) end)
|> Enum.map(fn i -> %{normal: _triangle_to_normal(i), triangle: i} end)
end
def triangles([last, _], current) do
0..@x_steps
|> Enum.flat_map(fn i -> _face_triangles(i, last, current) end)
|> Enum.map(fn i -> %{normal: _triangle_to_normal(i), triangle: i} end)
end
# define the fore-aft line to sweep the origin of the section along
defp _origin(z) do
V.create(0.0, 0.0, z)
# the line must curve in the Y (plan plane toward the centerline) direction toward the bow
# to keep the stringers sane rather than rise to meet the sheer
end
# define the sheer line
defp _sheer(z) do
V.create(@hboh, _calc_y(@hboh), z)
end
# define the hull section to be swept along the line y = f(x)
# Basic design assumption is that the section is constant and independent of z
defp _calc_y(x) do
:math.pow(x,@x_power)
end
defp _normal(x) do
up=V.create(1.0, @x_power * x, 0.0)
forward=V.create(0.0, 0.0, 1.0)
V.cross(forward, up) |> V.normalize()
end
# Define the scantlings (hull skin thickness), including re-enforcements (stringers / ribs)
defp _thickness(spacing, i) do
if (rem(i, spacing) >= (spacing - @stiffener_width)) do
@stiffener_thickness
else
@ordinary_thickness
end
end
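  # Note: _step_to_x/2 is not referenced anywhere in this module; it looks
  # like a debugging helper (its IO.inspect calls write to :stderr so they
  # would not corrupt STL output on :stdio).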
defp _step_to_x(i,{_,_,station_z}) do
# want @x_steps equal steps (buttocks) from origin to sheer
IO.inspect(:stderr, _sheer(station_z), [])
IO.inspect(:stderr, _origin(station_z), [])
{x_interval,_,_} = V.subtract(_sheer(station_z), _origin(station_z))
step_size = x_interval / @x_steps
{i, i * step_size}
end
defp _triangle_to_normal({{x0,y0,z0}, {x1,y1,z1}, {x2,y2,z2}}) do
V.cross(V.create(x1 - x0, y1 - y0, z1 - z0), V.create(x2 - x0, y2 - y0, z2 - z0)) |> V.normalize()
end
defp _start_triangle_faces(i, current) do
point = Enum.at(current, i)
lower_point = Enum.at(current, i - 1)
[{point.inner, point.outer, lower_point.outer}, {lower_point.outer, lower_point.inner, point.inner}]
end
defp _face_triangles(0, last, current) do
point = Enum.at(current, 0)
aft_point = Enum.at(last, 0)
[{point.inner, point.outer, aft_point.outer}, {aft_point.outer, aft_point.inner, point.inner}]
end
defp _face_triangles(i, last, current) do
point = Enum.at(current, i)
lower_point = Enum.at(current, i - 1)
aft_point = Enum.at(last, i)
lower_aft_point = Enum.at(last, i - 1)
# clockwise winding
outer = [{aft_point, point, lower_point}, {aft_point, lower_point, lower_aft_point}]
|> Enum.map(fn {a,b,c} -> {a.outer,b.outer,c.outer} end)
inner = [{point, aft_point, lower_aft_point}, {point, lower_aft_point, lower_point}]
|> Enum.map(fn {a,b,c} -> {a.inner,b.inner,c.inner} end)
if (@x_steps == i) do
outer ++ inner ++ [{point.inner, point.outer, aft_point.outer}, {aft_point.outer, aft_point.inner, point.inner}]
else
outer ++ inner
end
end
end
|
lib/geometry.ex
| 0.566498
| 0.677397
|
geometry.ex
|
starcoder
|
defmodule Goth do
@external_resource "README.md"
@moduledoc """
A Goth token server.
"""
use GenServer
require Logger
alias Goth.Backoff
alias Goth.Token
@registry Goth.Registry
@max_retries 20
@default_refresh_after 3_300_000
@doc """
Starts the server.
When the server is started, we attempt to fetch the token and store it in
internal cache. If we fail, we'll retry with backoff.
## Options
* `:name` - a unique name to register the server under. It can be any term.
* `:source` - the source to retrieve the token from.
See documentation for the `:source` option in `Goth.Token.fetch/1` for
more information.
* `:refresh_after` - Time in milliseconds after which the token will be automatically
refreshed. Defaults to `3_300_000` (55 minutes; 5 minutes before the token, which
is valid for 1h, expires)
* `:http_client` - a function that makes the HTTP request. Defaults to using built-in
integration with [Hackney](https://github.com/benoitc/hackney)
See documentation for the `:http_client` option in `Goth.Token.fetch/1` for
more information.
* `:prefetch` - how to prefetch the token when the server starts. The possible options
are `:async` to do it asynchronously or `:sync` to do it synchronously
(that is, the server doesn't start until an attempt to fetch the token was made). Defaults
to `:async`.
* `:max_retries` - the maximum number of retries (default: `20`)
* `:backoff_min` - the minimum backoff interval (default: `1_000`)
* `:backoff_max` - the maximum backoff interval (default: `30_000`)
* `:backoff_type` - the backoff strategy, `:exp` for exponential, `:rand` for random and
`:rand_exp` for random exponential (default: `:rand_exp`)
## Examples
Generate a token using a service account credentials file:
iex> credentials = "credentials.json" |> File.read!() |> Jason.decode!()
iex> {:ok, _} = Goth.start_link(name: MyApp.Goth, source: {:service_account, credentials, []})
iex> Goth.fetch!(MyApp.Goth)
%Goth.Token{...}
Retrieve the token using a refresh token:
iex> credentials = "credentials.json" |> File.read!() |> Jason.decode!()
iex> {:ok, _} = Goth.start_link(name: MyApp.Goth, source: {:refresh_token, credentials, []})
iex> Goth.fetch!(MyApp.Goth)
%Goth.Token{...}
Retrieve the token using the Google metadata server:
iex> {:ok, _} = Goth.start_link(name: MyApp.Goth, source: {:metadata, []})
iex> Goth.fetch!(MyApp.Goth)
%Goth.Token{...}
"""
@doc since: "1.3.0"
def start_link(opts) do
opts =
opts
|> Keyword.put_new(:refresh_after, @default_refresh_after)
|> Keyword.put_new(:http_client, {:hackney, []})
name = Keyword.fetch!(opts, :name)
GenServer.start_link(__MODULE__, opts, name: registry_name(name))
end
defmacrop ensure_hackney do
if Code.ensure_loaded?(:hackney) do
:ok
else
quote do
unless Code.ensure_loaded?(:hackney) do
Logger.error("""
Could not find hackney dependency.
Please add :hackney to your dependencies:
{:hackney, "~> 1.17"}
Or use a different HTTP client. See Goth.Token.fetch/1 documentation for more information.
""")
raise "missing hackney dependency"
end
{:ok, _} = Application.ensure_all_started(:hackney)
:ok
end
end
end
def __hackney__(options) do
ensure_hackney()
{method, options} = Keyword.pop!(options, :method)
{url, options} = Keyword.pop!(options, :url)
{headers, options} = Keyword.pop!(options, :headers)
{body, options} = Keyword.pop!(options, :body)
options = [:with_body] ++ options
case :hackney.request(method, url, headers, body, options) do
{:ok, status, headers, response_body} ->
{:ok, %{status: status, headers: headers, body: response_body}}
{:error, reason} ->
{:error, RuntimeError.exception(inspect(reason))}
end
end
@doc """
Fetches the token from the cache.
If the token is not in the cache, this function blocks for `timeout`
milliseconds (defaults to `5000`) while it is attempted to fetch
it in the background.
To fetch the token bypassing the cache, see `Goth.Token.fetch/1`.
"""
@doc since: "1.3.0"
def fetch(name, timeout \\ 5000) do
read_from_ets(name) || GenServer.call(registry_name(name), :fetch, timeout)
end
@doc """
Fetches the token, erroring if it is missing.
See `fetch/2` for more information.
"""
@doc since: "1.3.0"
def fetch!(name, timeout \\ 5000) do
case fetch(name, timeout) do
{:ok, token} -> token
{:error, exception} -> raise exception
end
end
defstruct [
:name,
:source,
:backoff,
:http_client,
:retry_after,
:refresh_after,
max_retries: @max_retries,
retries: @max_retries
]
defp read_from_ets(name) do
case Registry.lookup(@registry, name) do
[{_pid, %Token{} = token}] -> {:ok, token}
_ -> nil
end
end
@impl true
def init(opts) when is_list(opts) do
{backoff_opts, opts} = Keyword.split(opts, [:backoff_type, :backoff_min, :backoff_max])
{prefetch, opts} = Keyword.pop(opts, :prefetch, :async)
state = struct!(__MODULE__, opts)
state =
state
|> Map.update!(:http_client, &start_http_client/1)
|> Map.replace!(:backoff, Backoff.new(backoff_opts))
|> Map.replace!(:retries, state.max_retries)
case prefetch do
:async ->
{:ok, state, {:continue, :async_prefetch}}
:sync ->
prefetch(state)
{:ok, state}
end
end
@impl true
def handle_continue(:async_prefetch, state) do
prefetch(state)
{:noreply, state}
end
defp prefetch(state) do
# given calculating JWT for each request is expensive, we do it once
# on system boot to hopefully fill in the cache.
case Token.fetch(state) do
{:ok, token} ->
store_and_schedule_refresh(state, token)
{:error, _} ->
send(self(), :refresh)
end
end
@impl true
def handle_call(:fetch, _from, state) do
reply = read_from_ets(state.name) || fetch_and_schedule_refresh(state)
{:reply, reply, state}
end
defp fetch_and_schedule_refresh(state) do
with {:ok, token} <- Token.fetch(state) do
store_and_schedule_refresh(state, token)
{:ok, token}
end
end
defp start_http_client(:hackney) do
{&__hackney__/1, []}
end
defp start_http_client({:hackney, opts}) do
{&__hackney__/1, opts}
end
defp start_http_client(fun) when is_function(fun, 1) do
{fun, []}
end
defp start_http_client({fun, opts}) when is_function(fun, 1) do
{fun, opts}
end
defp start_http_client({module, _} = config) when is_atom(module) do
Logger.warn("Setting http_client: mod | {mod, opts} is deprecated in favour of http_client: fun | {fun, opts}")
Goth.HTTPClient.init(config)
end
@impl true
def handle_info(:refresh, state) do
case Token.fetch(state) do
{:ok, token} -> do_refresh(token, state)
{:error, exception} -> handle_retry(exception, state)
end
end
defp handle_retry(exception, %{retries: 1}) do
raise "too many failed attempts to refresh, last error: #{inspect(exception)}"
end
defp handle_retry(_, state) do
    # Backoff.backoff/1 yields the delay in milliseconds (note the ms-scale
    # :backoff_min/:backoff_max defaults), which Process.send_after/3 expects.
    {delay_ms, backoff} = Backoff.backoff(state.backoff)
    Process.send_after(self(), :refresh, delay_ms)
{:noreply, %{state | retries: state.retries - 1, backoff: backoff}}
end
defp do_refresh(token, state) do
state = %{state | retries: state.max_retries, backoff: Backoff.reset(state.backoff)}
store_and_schedule_refresh(state, token)
{:noreply, state}
end
defp store_and_schedule_refresh(state, token) do
put(state.name, token)
Process.send_after(self(), :refresh, state.refresh_after)
end
defp put(name, token) do
Registry.update_value(@registry, name, fn _ -> token end)
end
defp registry_name(name) do
{:via, Registry, {@registry, name}}
end
end
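# Typically started under an application's supervision tree; `use GenServer`
# above provides the child_spec (the source tuple is illustrative):
#
#     children = [{Goth, name: MyApp.Goth, source: {:metadata, []}}]
#     Supervisor.start_link(children, strategy: :one_for_one)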
|
lib/goth.ex
| 0.878503
| 0.540621
|
goth.ex
|
starcoder
|
defmodule ExDiceRoller.Compilers.Roll do
@moduledoc """
Handles compiling dice roll expressions.
iex> expr = "1d6"
"1d6"
iex> {:ok, tokens} = ExDiceRoller.Tokenizer.tokenize(expr)
{:ok, [{:int, 1, '1'}, {:roll, 1, 'd'}, {:int, 1, '6'}]}
iex> {:ok, parse_tree} = ExDiceRoller.Parser.parse(tokens)
{:ok, {:roll, 1, 6}}
iex> fun = ExDiceRoller.Compilers.Roll.compile(parse_tree)
iex> fun.([])
3
iex> fun.([])
2
## Exploding Dice
Some systems use a dice mechanic known as 'exploding dice'. The mechanic works
as follows:
1. a multi-sided die, in this case a six-sided die, is rolled
2. if the value is anything other than six, they record the result and skip
to step 5
3. if the value is six, the result is recorded, and the die is rolled again
4. steps 2 and 3 are repeated until a value other than six is rolled
5. the sum total result of all rolls is recorded and used
You can utilize this mechanic by specifying the `:explode` option for
ExDiceRoller.roll/2 calls, or specifying the `e` flag when using the `~a`
sigil. This option can be used with any ExDiceRoller roll option.
It should also be noted that the exploding dice mechanic is not applied to a
one-sided die, since that would result in an infinite loop.
Examples:
iex> expr = "1d6"
"1d6"
iex> {:ok, tokens} = ExDiceRoller.Tokenizer.tokenize(expr)
{:ok, [{:int, 1, '1'}, {:roll, 1, 'd'}, {:int, 1, '6'}]}
iex> {:ok, parse_tree} = ExDiceRoller.Parser.parse(tokens)
{:ok, {:roll, 1, 6}}
iex> fun = ExDiceRoller.Compilers.Roll.compile(parse_tree)
iex> fun.(opts: [:explode])
3
iex> fun.(opts: [:explode])
2
iex> fun.(opts: [:explode])
10
iex> import ExDiceRoller.Sigil
iex> ~a/1d10/re
9
iex> ~a/1d10/re
14
## Keeping Dice Rolls
A batch of dice being rolled can be returned as either their sum total, or
as individual results. The former is the default handling of rolls by
ExDiceRoller. The latter, keeping each die rolled, requires the option
`:keep`. Note that a list of die roll results will be returned when using the
`:keep` option.
iex> expr = "5d6"
"5d6"
iex> {:ok, tokens} = ExDiceRoller.Tokenizer.tokenize(expr)
{:ok, [{:int, 1, '5'}, {:roll, 1, 'd'}, {:int, 1, '6'}]}
iex> {:ok, parse_tree} = ExDiceRoller.Parser.parse(tokens)
{:ok, {:roll, 5, 6}}
iex> fun = ExDiceRoller.Compilers.Roll.compile(parse_tree)
iex> fun.(opts: [:keep])
[3, 2, 6, 4, 5]
### Kept Rolls and List Comprehensions
Kept rolls also support list comprehensions. See
`ExDiceRoller.ListComprehension` for more information.
"""
@behaviour ExDiceRoller.Compiler
alias ExDiceRoller.{Args, Compiler, ListComprehension}
@impl true
def compile({:roll, left_expr, right_expr}) do
compile_roll(Compiler.delegate(left_expr), Compiler.delegate(right_expr))
end
@spec compile_roll(Compiler.compiled_val(), Compiler.compiled_val()) :: Compiler.compiled_fun()
defp compile_roll(num, sides) when is_function(num) and is_function(sides) do
fn args -> roll_prep(num.(args), sides.(args), args) end
end
defp compile_roll(num, sides) when is_function(num),
do: fn args -> roll_prep(num.(args), sides, args) end
defp compile_roll(num, sides) when is_function(sides),
do: fn args -> roll_prep(num, sides.(args), args) end
defp compile_roll(num, sides),
do: fn args -> roll_prep(num, sides, args) end
@spec roll_prep(Compiler.calculated_val(), Compiler.calculated_val(), list(atom | tuple)) ::
integer
defp roll_prep(0, _, _), do: 0
defp roll_prep(_, 0, _), do: 0
defp roll_prep(n, s, _) when n < 0 or s < 0 do
raise(ArgumentError, "neither number of dice nor number of sides can be less than 0")
end
defp roll_prep(num, sides, args) do
num = Compiler.round_val(num)
sides = Compiler.round_val(sides)
fun =
case Args.has_option?(args, :keep) do
true -> keep_roll()
false -> normal_roll()
end
explode? = Args.has_option?(args, :explode)
ListComprehension.flattened_apply(num, sides, explode?, fun)
end
defp keep_roll do
fn n, s, e? -> Enum.map(1..n, fn _ -> roll(s, e?) end) end
end
defp normal_roll do
fn n, s, e? -> Enum.reduce(1..n, 0, fn _, acc -> acc + roll(s, e?) end) end
end
@spec roll(integer, boolean) :: integer
defp roll(sides, false) do
:rand.uniform(sides)
end
defp roll(sides, true) do
result = :rand.uniform(sides)
explode_roll(sides, result, result)
end
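  # A one-sided die always rolls its maximum, so the first clause below cuts
  # the explosion short, avoiding the infinite loop the moduledoc warns about.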
@spec explode_roll(integer, integer, integer) :: integer
defp explode_roll(_, 1, acc), do: acc
defp explode_roll(sides, sides, acc) do
result = :rand.uniform(sides)
explode_roll(sides, result, acc + result)
end
defp explode_roll(_, _, acc), do: acc
end
|
lib/compilers/roll.ex
| 0.831383
| 0.468304
|
roll.ex
|
starcoder
|
defmodule Wormwood.GQLCase do
@moduledoc """
This module defines a few helpful macros when testing against an Absinthe GraphQL schema.
It essentially registers an Absinthe schema and a GQL document to the module they're called in.
"""
alias Wormwood.GQLLoader
defmacro __using__(_opts) do
quote do
import Wormwood.GQLCase
end
end
@doc """
Call this macro in the module you wish to load your GQL documents in.
It takes 3 arguments: an atom naming the query, your Absinthe schema module, and a path to a GQL file containing the query or mutation.
```elixir
defmodule MyCoolApplication.MyModule do
load_gql :my_query, MyCoolApplication.MyAbsintheSchema, "assets/js/queries/MyQuery.gql"
# ...
```
After registering the query under a name as above, you can run it with
```elixir
result = query_gql_by(:my_query, variables: %{}, context: %{})
{:ok, query_data} = result
```
"""
defmacro load_gql(query_name, schema, file_path) when is_atom(query_name) do
quote do
document = GQLLoader.load_file!(unquote(file_path))
Module.put_attribute(unquote(__CALLER__.module), :_wormwood_gql_schema, unquote(schema))
Module.register_attribute(unquote(__CALLER__.module), unquote(query_name), persist: true)
Module.put_attribute(unquote(__CALLER__.module), unquote(query_name), document)
end
end
@doc """
Call this macro in the module you wish to load your GQL document in.
It takes 2 arguments, the first is your Absinthe schema module, the second is a path to a GQL file that contains a GraphQL query or mutation.
For example:
```elixir
defmodule MyCoolApplication.MyModule do
load_gql MyCoolApplication.MyAbsintheSchema, "assets/js/queries/MyQuery.gql"
# ...
```
"""
defmacro load_gql(schema, file_path) do
quote do
if Module.get_attribute(unquote(__CALLER__.module), :_wormwood_gql_query) != nil do
raise WormwoodSetupError, reason: :double_declaration
end
document = GQLLoader.load_file!(unquote(file_path))
Module.put_attribute(unquote(__CALLER__.module), :_wormwood_gql_query, document)
Module.put_attribute(unquote(__CALLER__.module), :_wormwood_gql_schema, unquote(schema))
end
end
@doc """
Call this macro in the module where you want to query using a static string as your query.
It takes 2 arguments, the first is your Absinthe schema module, the second is a string of a GQL query or mutation.
This still supports imports; they will be resolved from the current working directory (most likely the top level of your app).
For example:
```elixir
defmodule MyCoolApplication.MyModule do
set_gql MyCoolApplication.MyAbsintheSchema, "query { some { cool { gql { id } }}}"
# ...
```
"""
defmacro set_gql(schema, query_string) do
quote do
if Module.get_attribute(unquote(__CALLER__.module), :_wormwood_gql_query) != nil do
raise WormwoodSetupError, reason: :double_declaration
end
document = GQLLoader.load_string!(unquote(query_string))
Module.put_attribute(unquote(__CALLER__.module), :_wormwood_gql_query, document)
Module.put_attribute(unquote(__CALLER__.module), :_wormwood_gql_schema, unquote(schema))
end
end
@doc """
Call this macro in the module you've loaded a document into using `load_gql/3`.
For example:
```elixir
result = query_gql_by(:my_query, variables: %{}, context: %{})
{:ok, query_data} = result
```
"""
defmacro query_gql_by(query_name, options \\ []) do
quote do
attribute =
unquote(__CALLER__.module).__info__(:attributes)[unquote(query_name)]
|> case do
nil -> nil
list -> List.last(list)
end
if is_nil(attribute) do
raise WormwoodSetupError, reason: :missing_declaration
end
Absinthe.run(
attribute,
@_wormwood_gql_schema,
unquote(options)
)
end
end
@doc """
Call this macro in the module you've loaded a document into using `load_gql` or `set_gql`.
Calling this will execute the document loaded into the module against the schema loaded in the module.
It accepts a keyword list for `options` that are passed into [`Absinthe.run/3`](https://hexdocs.pm/absinthe/Absinthe.html#run/3).
Please see the [Absinthe docs](https://hexdocs.pm/absinthe/Absinthe.html#run/3-options) for more information on the options that can be passed to this macro.
Returns a tuple of the query result from the [`Absinthe.run/3`](https://hexdocs.pm/absinthe/Absinthe.html#run/3) call.
For example:
```elixir
result = query_gql(variables: %{}, context: %{})
{:ok, query_data} = result
```
"""
defmacro query_gql(options \\ []) do
quote do
if is_nil(@_wormwood_gql_query) do
raise WormwoodSetupError, reason: :missing_declaration
end
Absinthe.run(
@_wormwood_gql_query,
@_wormwood_gql_schema,
unquote(options)
)
end
end
@doc """
Call this macro in the module you've loaded a document into using `load_gql` or `set_gql`.
Calling this will execute the document loaded into the module against the schema loaded in the module.
Absinthe will use the phases in the "pipeline_phases" list argument when running.
It also accepts a keyword list for `options` that are passed into [`Absinthe.run/3`](https://hexdocs.pm/absinthe/Absinthe.html#run/3).
Please see the [Absinthe docs](https://hexdocs.pm/absinthe/Absinthe.html#run/3-options) for more information on the options that can be passed to this macro.
Returns a tuple of the query result from the [`Absinthe.Pipeline.run/2`](https://hexdocs.pm/absinthe/Absinthe.Pipeline.html#run/2) call.
For example:
```elixir
pipeline = [Absinthe.Phase.Parse, Absinthe.Phase.Blueprint]
result = query_gql_with_pipeline(pipeline)
assert {:ok, %Absinthe.Blueprint{} = _blueprint, _pipeline} = result
```
"""
defmacro query_gql_with_pipeline(pipeline_phases \\ [], options \\ []) do
quote do
if is_nil(@_wormwood_gql_query) do
raise WormwoodSetupError, reason: :missing_declaration
end
options_list =
unquote(options)
|> Keyword.put(:schema, @_wormwood_gql_schema)
|> Absinthe.Pipeline.options()
pipeline = Enum.map(unquote(pipeline_phases), fn phase -> {phase, options_list} end)
Absinthe.Pipeline.run(@_wormwood_gql_query, pipeline)
end
end
end
|
lib/gql_case.ex
| 0.819785
| 0.841956
|
gql_case.ex
|
starcoder
|
defmodule Oban.Validation do
@moduledoc false
@type validator :: ({atom(), term()} -> :ok | {:error, term()})
@doc """
A utility to help validate options without resorting to `throw` or `raise` for control flow.
## Example
Ensure all keys are known and the correct type:
iex> Oban.Validation.validate(name: Oban, fn
...> {:conf, conf} when is_struct(conf) -> :ok
...> {:name, name} when is_atom(name) -> :ok
...> opt -> {:error, "unknown option: " <> inspect(opt)}
...> end)
:ok
"""
@spec validate(atom(), Keyword.t(), validator()) :: :ok | {:error, String.t()}
def validate(parent_key \\ nil, opts, validator)
def validate(_parent_key, opts, validator) when is_list(opts) and is_function(validator, 1) do
Enum.reduce_while(opts, :ok, fn opt, acc ->
case validator.(opt) do
:ok -> {:cont, acc}
{:error, _reason} = error -> {:halt, error}
end
end)
end
def validate(parent_key, opts, _validator) do
{:error, "expected #{inspect(parent_key)} to be a list, got: #{inspect(opts)}"}
end
@doc """
Similar to `validate/2`, but it will raise an `ArgumentError` for any errors.
"""
@spec validate!(opts :: Keyword.t(), validator()) :: :ok
def validate!(opts, validator) do
with {:error, reason} <- validator.(opts), do: raise(ArgumentError, reason)
end
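  # Note: validate!/2 hands the entire `opts` value to the validator, unlike
  # validate/3 which applies the validator per-option. A sketch (where
  # `validate_option/1` is your per-option checker):
  #
  #     Oban.Validation.validate!(opts, fn opts ->
  #       Oban.Validation.validate(opts, &validate_option/1)
  #     end)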
# Shared Validations
@doc false
def validate_integer(key, value, opts \\ []) do
min = Keyword.get(opts, :min, 1)
    if is_integer(value) and value >= min do
:ok
else
{:error, "expected #{inspect(key)} to be a positive integer, got: #{inspect(value)}"}
end
end
@doc false
def validate_timezone(key, value) do
if is_binary(value) and match?({:ok, _}, DateTime.now(value)) do
:ok
else
{:error, "expected #{inspect(key)} to be a known timezone, got: #{inspect(value)}"}
end
end
@doc false
def validate_timeout(key, value) do
if (is_integer(value) and value > 0) or value == :infinity do
:ok
else
{:error,
"expected #{inspect(key)} to be a positive integer or :infinity, got: #{inspect(value)}"}
end
end
end
|
lib/oban/validation.ex
| 0.863852
| 0.505798
|
validation.ex
|
starcoder
|
defmodule Instream.Series.Hydrator do
@moduledoc false
alias Instream.Decoder.RFC3339
@doc """
Converts a plain map into a series definition struct.
Keys not defined in the series are silently dropped.
"""
@spec from_map(module, map) :: struct
def from_map(series, data) do
data_fields = Map.take(data, series.__meta__(:fields))
data_tags = Map.take(data, series.__meta__(:tags))
struct(series, %{
fields: struct(Module.safe_concat(series, Fields), data_fields),
tags: struct(Module.safe_concat(series, Tags), data_tags),
timestamp: convert_to_timestamp(data[:time] || data[:timestamp])
})
end
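  # Sketch, assuming a series module defined via `use Instream.Series` with a
  # :value field and a :host tag (all names illustrative):
  #
  #     Hydrator.from_map(MySeries, %{value: 0.5, host: "web-1", time: 1_234})
  #     #=> %MySeries{fields: %MySeries.Fields{value: 0.5}, tags: ..., timestamp: 1234}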
@doc """
Converts a query result map into a list of series definition structs.
Keys not defined in the series are silently dropped.
"""
@spec from_result(module, map | [map]) :: [struct]
def from_result(series, %{
results: [%{series: [%{values: result_values, columns: columns} = data]}]
}) do
# optional :tags set in InfluxQL "GROUP BY" results
tags = Map.get(data, :tags, %{})
Enum.map(result_values, fn values ->
mapped_values =
columns
|> Enum.zip(values)
|> Enum.into(%{}, fn {k, v} -> {String.to_atom(k), v} end)
from_map(series, Map.merge(tags, mapped_values))
end)
end
def from_result(series, rows) when is_list(rows) do
Enum.map(rows, fn row ->
row =
case row["_field"] do
nil -> row
field -> Map.put(row, field, row["_value"])
end
timestamp = row["_time"]
data =
row
|> Map.drop(["_field", "_measurement", "_start", "_stop", "_time", "_value", "table"])
|> Map.new(fn {k, v} -> {String.to_atom(k), v} end)
|> Map.put(:time, timestamp)
from_map(series, data)
end)
end
defp convert_to_timestamp(time) when is_integer(time), do: time
defp convert_to_timestamp(time) when is_binary(time), do: RFC3339.to_nanosecond(time)
defp convert_to_timestamp(_), do: nil
end
|
lib/instream/series/hydrator.ex
| 0.826887
| 0.739681
|
hydrator.ex
|
starcoder
|
defmodule BatchElixir do
alias BatchElixir.RestClient.Transactional
alias BatchElixir.RestClient.Transactional.Message
alias BatchElixir.RestClient.Transactional.Recipients
alias BatchElixir.Serialisation
alias BatchElixir.Server.Producer
@moduledoc """
Documentation for BatchElixir.
REST client for interacting with the **Batch** API.
You have to define this configuration in your application:
```elixir
config :batch_elixir,
rest_api_key: "rest api key", # Required, if not provided the application fail to start
devices: [web: "sdk key", ios: "sdk key", ...], # List of devices that the notification can use. The key name are up to you
default_deeplink: "myapp://",
producer_name: BatchElixir.Server.Producer, # Default, name of the producer is BatchElixir.Server.Producer
consumer_options: [], # Default to empty, extra options like mix/max demand for Genstage
producer_options: [], # extra options for GenStage as producer. Typically [buffer_size: 10_000]
batch_url: "https://api.batch.com/1.1/", # Base url of batch api
retry_interval_in_milliseconds: 1_000, # Interval between each failed requests
max_attempts: 3, # Maximum attempts of failed requests
number_of_consumers: 1, # Number of consumers to start. Defaults to 1
stats_driver: BatchElixir.Stats.Memory # BatchElixir.Stats.Memory For In memory stats or BatchElixir.Stats.Statix to send to datadog via Statix
```
"""
@doc """
Send a notification to one or more users using the producers/consumers.
`custom_payload` can be either a string, a structure, or a map.
If it is not provided or the value is nil, no custom payload will be included in the request.
If the API key for the `device` does not exist, returns `{:error, reason}`,
otherwise returns `:ok`.
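## Examples

A hypothetical call (the device key, group id and custom ids below are
purely illustrative):

    BatchElixir.send_notication(
      :ios,
      "welcome",
      ["user-42"],
      "Hello",
      "Welcome aboard!"
    )
    #=> :ok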
"""
@spec send_notication(
device :: atom(),
group_id :: String.t(),
custom_ids :: [String.t()],
title :: String.t(),
message :: String.t(),
deeplink :: String.t(),
custom_payload :: String.t() | nil,
labels :: [String.t()],
gcm_collapse_key_enabled :: boolean() | nil
) :: :ok | {:error, String.t()}
def send_notication(
device,
group_id,
custom_ids,
title,
message,
deeplink \\ nil,
custom_payload \\ nil,
labels \\ [],
gcm_collapse_key_enabled \\ nil
)
def send_notication(
device,
group_id,
custom_ids,
title,
message,
nil,
custom_payload,
labels,
gcm_collapse_key_enabled
) do
send_notication(
device,
group_id,
custom_ids,
title,
message,
get_default_deeplink(),
custom_payload,
labels,
gcm_collapse_key_enabled
)
end
def send_notication(
device,
group_id,
custom_ids,
title,
message,
deeplink,
nil,
labels,
gcm_collapse_key_enabled
) do
structure =
create_transactional_structure(
group_id,
custom_ids,
title,
message,
deeplink,
labels,
gcm_collapse_key_enabled
)
do_send_notication(device, structure)
end
def send_notication(
device,
group_id,
custom_ids,
title,
message,
deeplink,
custom_payload,
labels,
gcm_collapse_key_enabled
)
when is_map(custom_payload) do
custom_payload =
custom_payload
|> Serialisation.structure_to_map()
|> Poison.encode!()
send_notication(
device,
group_id,
custom_ids,
title,
message,
deeplink,
custom_payload,
labels,
gcm_collapse_key_enabled
)
end
def send_notication(
device,
group_id,
custom_ids,
title,
message,
deeplink,
custom_payload,
labels,
gcm_collapse_key_enabled
)
when is_binary(custom_payload) do
structure =
create_transactional_structure(
group_id,
custom_ids,
title,
message,
deeplink,
labels,
gcm_collapse_key_enabled
)
structure = %Transactional{structure | custom_payload: custom_payload}
do_send_notication(device, structure)
end
defp do_send_notication(device, transactional) do
do_send_notication_with_api_key(devices()[device], transactional, device)
end
defp do_send_notication_with_api_key(nil, _transactional, device) do
{:error, "No API key found for: #{device}"}
end
defp do_send_notication_with_api_key(api_key, transactional, _device) do
Producer.send_notification(api_key, transactional)
:ok
end
defp create_transactional_structure(
group_id,
custom_ids,
title,
message,
deeplink,
labels,
gcm_collapse_key_enabled
) do
message = %Message{title: title, body: message}
recipients = %Recipients{custom_ids: custom_ids}
%Transactional{
group_id: group_id,
message: message,
recipients: recipients,
deeplink: deeplink,
labels: labels,
gcm_collapse_key: get_gcm_collapse_key(gcm_collapse_key_enabled)
}
end
defp get_gcm_collapse_key(nil), do: nil
defp get_gcm_collapse_key(value) when is_boolean(value), do: %{"enabled" => value}
defp get_default_deeplink, do: Application.fetch_env!(:batch_elixir, :default_deeplink)
defp devices, do: Application.get_env(:batch_elixir, :devices, [])
end
# file: lib/batch_elixir.ex
defmodule Omise.Schedule do
@moduledoc ~S"""
Provides Schedule API interfaces.
<https://www.omise.co/schedules-api>
"""
use Omise.HTTPClient, endpoint: "schedules"
defstruct object: "schedule",
id: nil,
livemode: nil,
location: nil,
status: nil,
every: nil,
period: nil,
on: nil,
in_words: nil,
start_date: nil,
end_date: nil,
charge: %{},
transfer: %{},
occurrences: %Omise.List{data: [%Omise.Occurrence{}]},
next_occurrence_dates: nil,
created: nil
@type t :: %__MODULE__{
object: String.t(),
id: String.t(),
livemode: boolean,
location: String.t(),
status: String.t(),
every: integer,
period: String.t(),
on: map,
in_words: String.t(),
start_date: String.t(),
end_date: String.t(),
charge: map,
transfer: map,
occurrences: Omise.List.t(),
next_occurrence_dates: list,
created: String.t()
}
@doc ~S"""
List all schedules.
Returns `{:ok, schedules}` if the request is successful, `{:error, error}` otherwise.
## Query Parameters:
* `offset` - (optional, default: 0) The offset of the first record returned.
* `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
* `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
* `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
## Examples
Omise.Schedule.list(limit: 10)
"""
@spec list(Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
def list(params \\ [], opts \\ []) do
opts = Keyword.merge(opts, as: %Omise.List{data: [%__MODULE__{}]})
get(@endpoint, params, opts)
end
@doc ~S"""
List all occurrences of a schedule.
Returns `{:ok, occurrences}` if the request is successful, `{:error, error}` otherwise.
## Query Parameters:
* `offset` - (optional, default: 0) The offset of the first record returned.
* `limit` - (optional, default: 20, maximum: 100) The maximum amount of records returned.
* `from` - (optional, default: 1970-01-01T00:00:00Z, format: ISO 8601) The UTC date and time limiting the beginning of returned records.
* `to` - (optional, default: current UTC Datetime, format: ISO 8601) The UTC date and time limiting the end of returned records.
## Examples
Omise.Schedule.list_occurrences("schd_test_584yqgiuavbzrfng7mv")
"""
@spec list_occurrences(String.t(), Keyword.t(), Keyword.t()) :: {:ok, Omise.List.t()} | {:error, Omise.Error.t()}
def list_occurrences(id, params \\ [], opts \\ []) do
opts = Keyword.merge(opts, as: %Omise.List{data: [%Omise.Occurrence{}]})
get("#{@endpoint}/#{id}/occurrences", params, opts)
end
@doc ~S"""
Retrieve a schedule.
## Examples
Omise.Schedule.retrieve("schd_test_5850ga4l8a6r6bgj4oj")
"""
@spec retrieve(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def retrieve(id, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
get("#{@endpoint}/#{id}", [], opts)
end
@doc ~S"""
Create a schedule.
Returns `{:ok, schedule}` if the request is successful, `{:error, error}` otherwise.
## Request Parameters:
* `every` - How often the schedule runs. E.g.: Every `3` weeks.
* `period` - `day`, `week` or `month`. E.g.: Every 3 `week`s.
* `on` - The keys of this hash depend on the period.
* `start_date` - (optional) When the schedule should start, in ISO 8601 format (YYYY-MM-DD). Defaults to today.
* `end_date` - When the schedule should end, in ISO 8601 format (YYYY-MM-DD).
* `capture` - (optional) Whether or not you want the charge to be captured right away. When not specified, it defaults to true.
* `charge` - A charge map. (for charge schedule)
* `transfer` - A transfer map. (for transfer schedule)
## Examples
# Charge a specific customer card every 2 days
Omise.Schedule.create(
every: 2,
period: "day",
start_date: "2017-06-5",
end_date: "2018-05-01",
charge: [
customer: "<KEY>",
amount: 199_00,
description: "Membership fee",
]
)
# Charge every Monday and Friday
Omise.Schedule.create(
every: 1,
period: "week",
on: [
weekdays: ["monday", "friday"]
],
start_date: "2017-06-5",
end_date: "2018-05-01",
charge: [
customer: "<KEY>",
amount: 199_00,
description: "Membership fee",
]
)
# Charge on the 1st, 10th and 15th every 3 months
Omise.Schedule.create(
every: 3,
period: "month",
on: [
days_of_month: [1, 10, 15]
],
start_date: "2017-06-5",
end_date: "2018-05-01",
charge: [
customer: "<KEY>",
amount: 199_00,
description: "Membership fee",
]
)
# Charge on the 2nd Monday every month
Omise.Schedule.create(
every: 1,
period: "month",
on: [
weekday_of_month: "second_monday"
],
start_date: "2017-06-5",
end_date: "2018-05-01",
charge: [
customer: "<KEY>",
amount: 199_00,
description: "Membership fee",
]
)
# Transfer a fixed amount every 2 days
Omise.Schedule.create(
every: 2,
period: "day",
start_date: "2017-07-8",
end_date: "2017-07-31",
transfer: [
recipient: "recp_test_55j2an6c8fd07xbccd3",
amount: 100_00_00,
]
)
# Transfer a percentage of the balance every Monday and Friday
Omise.Schedule.create(
every: 1,
period: "week",
on: [
weekdays: ["monday", "friday"],
],
start_date: "2017-07-8",
end_date: "2017-07-31",
transfer: [
recipient: "recp_test_556jkf7u174ptxuytac",
percentage_of_balance: 75,
]
)
"""
@spec create(Keyword.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def create(params, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
post(@endpoint, params, opts)
end
@doc ~S"""
Destroy a schedule.
Returns `{:ok, schedule}` if the request is successful, `{:error, error}` otherwise.
## Examples
Omise.Schedule.destroy("schd_test_584yqgiuavbzrfng7mv")
"""
@spec destroy(String.t(), Keyword.t()) :: {:ok, t} | {:error, Omise.Error.t()}
def destroy(id, opts \\ []) do
opts = Keyword.merge(opts, as: %__MODULE__{})
delete("#{@endpoint}/#{id}", opts)
end
end
# file: lib/omise/schedule.ex
defmodule OMG.ChildChain.BlockQueue do
@moduledoc """
Manages the process of submitting new blocks to the root chain contract.
On startup uses information persisted in `OMG.DB` and the root chain contract to recover the status of the
submissions.
Tracks the current Ethereum height as well as the mined child chain block submissions to date. Based on this,
`OMG.ChildChain.BlockQueue.Core` triggers the forming of a new child chain block (by `OMG.State.form_block`).
Listens for newly formed blocks arriving via an `OMG.Bus` subscription, hands them off to
`OMG.ChildChain.BlockQueue.Core` to track, and submits them with an appropriate gas price.
Uses `OMG.ChildChain.BlockQueue.Core` to determine whether to resubmit blocks not yet mined on the root chain with
a higher gas price.
Receives responses from the Ethereum RPC (or another submitting agent) and uses `OMG.ChildChain.BlockQueue.Core` to
determine what they mean to the process of submitting - see `OMG.ChildChain.BlockQueue.Core.process_submit_result/3`
for details.
See `OMG.ChildChain.BlockQueue.Core` for the implementation of the business logic for the block queue.
Handles timing of calls to root chain.
Driven by the block height and mined transaction data delivered by the local geth node, and by new blocks
formed by the server. Resubmits transactions until they are mined.
"""
use OMG.Utils.LoggerExt
alias OMG.Block
alias OMG.ChildChain.BlockQueue.Balance
alias OMG.ChildChain.BlockQueue.Core
alias OMG.ChildChain.BlockQueue.Core.BlockSubmission
alias OMG.ChildChain.BlockQueue.GasAnalyzer
alias OMG.ChildChain.BlockQueue.GasPriceAdjustment
alias OMG.Eth
alias OMG.Eth.Client
alias OMG.Eth.Encoding
alias OMG.Eth.EthereumHeight
alias OMG.Eth.RootChain
@type eth_height() :: non_neg_integer()
@type hash() :: BlockSubmission.hash()
@type plasma_block_num() :: BlockSubmission.plasma_block_num()
# child chain block number, as assigned by plasma contract
@type encoded_signed_tx() :: binary()
use GenServer
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
@doc """
Initializes the GenServer state; most of the work is done in `handle_continue/2`.
"""
def init(args) do
{:ok, args, {:continue, :setup}}
end
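# A hypothetical start-up sketch; the keys below are the ones fetched in
# `handle_continue/2`, with purely illustrative values:
#
#     OMG.ChildChain.BlockQueue.start_link(
#       submission_finality_margin: 12,
#       child_block_interval: 1_000,
#       contract_deployment_height: 1,
#       block_submit_every_nth: 1,
#       block_submit_max_gas_price: 20_000_000_000,
#       block_queue_eth_height_check_interval_ms: 6_000,
#       metrics_collection_interval: 60_000
#     )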
@doc """
Reads the submission status from `OMG.DB` (the blocks persisted there). Initializes the state based on this
and configuration.
In particular, it re-enqueues any blocks whose submissions have not yet been seen as mined.
"""
def handle_continue(:setup, args) do
_ = Logger.info("Starting #{__MODULE__} service.")
:ok = Client.node_ready()
finality_threshold = Keyword.fetch!(args, :submission_finality_margin)
child_block_interval = Keyword.fetch!(args, :child_block_interval)
contract_deployment_height = Keyword.fetch!(args, :contract_deployment_height)
{:ok, parent_height} = EthereumHeight.get()
mined_num = RootChain.get_mined_child_block()
{top_mined_hash, _} = RootChain.blocks(mined_num)
{:ok, stored_child_top_num} = OMG.DB.get_single_value(:child_top_block_number)
_ =
Logger.info(
"Starting BlockQueue at " <>
"parent_height: #{inspect(parent_height)}, parent_start: #{inspect(contract_deployment_height)}, " <>
"mined_child_block: #{inspect(mined_num)}, stored_child_top_block: #{inspect(stored_child_top_num)}"
)
range =
Core.child_block_nums_to_init_with(mined_num, stored_child_top_num, child_block_interval, finality_threshold)
{:ok, known_hashes} = OMG.DB.block_hashes(range)
_ = Logger.info("Starting BlockQueue, top_mined_hash: #{inspect(Encoding.to_hex(top_mined_hash))}")
block_submit_every_nth = Keyword.fetch!(args, :block_submit_every_nth)
block_submit_max_gas_price = Keyword.fetch!(args, :block_submit_max_gas_price)
gas_price_adj_params = %GasPriceAdjustment{max_gas_price: block_submit_max_gas_price}
core =
Core.new(
mined_child_block_num: mined_num,
known_hashes: Enum.zip(range, known_hashes),
top_mined_hash: top_mined_hash,
parent_height: parent_height,
child_block_interval: child_block_interval,
block_submit_every_nth: block_submit_every_nth,
finality_threshold: finality_threshold,
gas_price_adj_params: gas_price_adj_params
)
{:ok, state} =
case core do
{:ok, _state} = result ->
result
{:error, reason} = error when reason == :mined_blknum_not_found_in_db or reason == :contract_ahead_of_db ->
_ =
log_init_error(
known_hashes: known_hashes,
parent_height: parent_height,
mined_num: mined_num,
stored_child_top_num: stored_child_top_num
)
error
end
interval = Keyword.fetch!(args, :block_queue_eth_height_check_interval_ms)
{:ok, _} = :timer.send_interval(interval, self(), :check_ethereum_status)
# `link: true` because we want the `BlockQueue` to restart and resubscribe, if the bus crashes
:ok = OMG.Bus.subscribe({:child_chain, "blocks"}, link: true)
metrics_collection_interval = Keyword.fetch!(args, :metrics_collection_interval)
{:ok, _} = :timer.send_interval(metrics_collection_interval, self(), :send_metrics)
_ = Logger.info("Started #{inspect(__MODULE__)}")
{:noreply, state}
end
def handle_info(:send_metrics, state) do
:ok = :telemetry.execute([:process, __MODULE__], %{}, state)
{:noreply, state}
end
@doc """
Checks the status of the Ethereum root chain, the top mined child block number
and status of State to decide what to do.
`OMG.ChildChain.BlockQueue.Core` decides whether a new block should be formed or not.
"""
def handle_info(:check_ethereum_status, state) do
{:ok, ethereum_height} = EthereumHeight.get()
mined_blknum = RootChain.get_mined_child_block()
{_, is_empty_block} = OMG.State.get_status()
_ = Logger.debug("Ethereum at \#'#{inspect(ethereum_height)}', mined child at \#'#{inspect(mined_blknum)}'")
state1 =
case Core.set_ethereum_status(state, ethereum_height, mined_blknum, is_empty_block) do
{:do_form_block, state1} ->
:ok = OMG.State.form_block()
state1
{:dont_form_block, state1} ->
state1
end
submit_blocks(state1)
{:noreply, state1}
end
@doc """
Lines up a new block for submission. Presumably `OMG.State.form_block` has formed a new child chain
block and written it to the `:internal_event_bus`.
"""
def handle_info({:internal_event_bus, :enqueue_block, %Block{} = block}, state) do
{:ok, parent_height} = EthereumHeight.get()
state1 = Core.enqueue_block(state, block.hash, block.number, parent_height)
_ = Logger.info("Enqueuing block num '#{inspect(block.number)}', hash '#{inspect(Encoding.to_hex(block.hash))}'")
submit_blocks(state1)
{:noreply, state1}
end
# private (server)
@spec submit_blocks(Core.t()) :: :ok
defp submit_blocks(state) do
state
|> Core.get_blocks_to_submit()
|> Enum.each(&submit(&1))
end
defp submit(submission) do
_ = Logger.info("Submitting: #{inspect(submission)}")
submit_result = Eth.submit_block(submission.hash, submission.nonce, submission.gas_price)
newest_mined_blknum = RootChain.get_mined_child_block()
final_result = Core.process_submit_result(submission, submit_result, newest_mined_blknum)
final_result =
case final_result do
{:error, _} = error ->
_ = log_eth_node_error()
error
{:ok, txhash} ->
_ = GasAnalyzer.enqueue(txhash)
_ = Balance.check()
:ok
:ok ->
:ok
end
:ok = final_result
end
defp log_init_error(fields) do
fields = Keyword.update!(fields, :known_hashes, fn hashes -> Enum.map(hashes, &Encoding.to_hex/1) end)
_ =
Logger.error(
"The child chain might have not been wiped clean when starting a child chain from scratch: " <>
"#{inspect(fields)}. Check README.MD and follow the setting up child chain."
)
log_eth_node_error()
end
defp log_eth_node_error() do
eth_node_diagnostics = get_node_diagnostics()
Logger.error("Ethereum operation failed, additional diagnostics: #{inspect(eth_node_diagnostics)}")
end
defp get_node_diagnostics() do
Enum.into(["personal_listWallets", "admin_nodeInfo", "parity_enode"], %{}, &get_node_diagnostic/1)
end
defp get_node_diagnostic(rpc_call_name) when is_binary(rpc_call_name) do
{rpc_call_name, Ethereumex.HttpClient.request(rpc_call_name, [], [])}
rescue
error -> {rpc_call_name, inspect(error)}
end
end
# file: apps/omg_child_chain/lib/omg_child_chain/block_queue.ex
defmodule Mix.Tasks.Uiar do
@moduledoc """
Formats the given source code to follow the coding styles below:
* Directives are grouped and ordered as `use`, `import`, `alias` and `require`.
* Each group of directive is separated by an empty line.
* Directives of the same group are ordered alphabetically.
* If a directive handles multiple modules with `{}`, they are alphabetically ordered.
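For example, a hypothetical module whose directives read:

    import Foo
    use Bar
    alias Baz.{B, A}

would be rewritten as:

    use Bar

    import Foo

    alias Baz.{A, B}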
Files can be passed by command line arguments:
mix uiar mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}"
If no arguments are passed, they are taken from `inputs` of `.formatter.exs`.
If the `--check-formatted` option is given, it doesn't modify files but raises an error if they are not formatted yet.
"""
use Mix.Task
@impl Mix.Task
def run(args) do
{opts, inputs} = OptionParser.parse!(args, strict: [check_formatted: :boolean])
do_run(inputs, opts)
end
defp do_run(inputs, check_formatted: true) do
for path <- paths(inputs) do
source = File.read!(path)
case Uiar.format(source) do
{:ok, ^source} -> nil
_ -> path
end
end
|> Enum.reject(&is_nil/1)
|> case do
[] ->
:ok
non_formatted_paths ->
Mix.raise("""
The following files are not formatted according to the uiar rules:
#{to_bullet_list(non_formatted_paths)}
""")
end
end
defp do_run(inputs, _opts) do
for path <- paths(inputs) do
source = File.read!(path)
case Uiar.format(source) do
{:ok, formatted} ->
File.write!(path, formatted)
{:error, errors} ->
Mix.raise("Failed: path=#{path}, errors=#{inspect(errors)}")
end
end
end
defp paths([]) do
{formatter, _} = Code.eval_file(".formatter.exs")
case Keyword.get(formatter, :inputs, []) do
[] -> Mix.raise("No paths found.")
inputs -> paths(inputs)
end
end
defp paths(inputs) do
inputs
|> Enum.flat_map(&Path.wildcard/1)
|> Enum.uniq()
end
defp to_bullet_list(paths) do
Enum.map_join(paths, "\n", &" * #{&1}")
end
end
# file: lib/mix/tasks/uiar.ex
defmodule BitstylesPhoenix.Showcase do
@moduledoc false
import Phoenix.HTML, only: [safe_to_string: 1]
import Phoenix.HTML.Tag, only: [content_tag: 3]
@doctest_entries ["iex>", "...>"]
defmacro story(name, example, opts \\ []) do
code =
example
|> to_string()
|> String.split("\n")
|> Enum.map(&String.trim/1)
|> Enum.reject(fn line -> Enum.any?(@doctest_entries, &String.starts_with?(line, &1)) end)
|> Enum.join("\n")
storydoc = """
## #{name}
#{sandbox(code, opts)}
#{example}
"""
extra_html = Keyword.get(opts, :extra_html)
storydoc =
if extra_html && Keyword.get(opts, :show_extra_html, true) do
storydoc <>
"""
*Requires additional content on the page:*
```
#{extra_html}
```
"""
else
storydoc
end
if Keyword.get(opts, :module, false) do
quote do
@moduledoc @moduledoc <> unquote(storydoc)
end
else
quote do
@doc @doc <> unquote(storydoc)
end
end
end
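# A hypothetical invocation sketch (the component and doctest content are
# made up for illustration; `story/3` appends the rendered example to the
# enclosing @doc):
#
#     @doc "Renders a button."
#     story("Default button", """
#         iex> ui_button("Save")
#         ~s(<button class="a-button">Save</button>)
#     """)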
@default_iframe_style """
height:1px; \
border:none; \
overflow:hidden; \
padding-left: 1em; \
"""
defp sandbox(code, opts) do
extra_html = Keyword.get(opts, :extra_html, "")
transparent = Keyword.get(opts, :transparent, true)
{result, _} = Code.eval_string(code)
dist = BitstylesPhoenix.Bitstyles.cdn_url()
style =
if transparent do
"""
html{ \
background-color: transparent !important; \
} \
\
@media (prefers-color-scheme: dark) { \
body {color: #fff; } \
} \
"""
else
""
end
iframe_opts =
[
srcdoc:
~s(<html><head><style>#{style}</style><link rel="stylesheet" href="#{dist}/build/bitstyles.css"></head><body>#{Enum.join([extra_html, result]) |> String.replace("\n", "")}</body></html>),
style: "",
allowtransparency: if(transparent, do: "true", else: "false")
]
|> Keyword.merge(style_opts(opts))
if dist do
safe_to_string(content_tag(:iframe, "", iframe_opts))
else
""
end
end
defp style_opts(opts) do
width = Keyword.get(opts, :width, "auto")
Keyword.get(opts, :height)
|> case do
nil ->
[
style: "#{@default_iframe_style}width: #{width}",
# https://stackoverflow.com/questions/819416/adjust-width-and-height-of-iframe-to-fit-with-content-in-it
onload: """
javascript:(function(o) { \
o.style.height=(o.contentWindow.document.body.scrollHeight + 25)+"px"; \
}(this)); \
"""
]
height ->
[style: "#{@default_iframe_style}height: #{height}; width: #{width};"]
end
end
end
# file: lib/bitstyles_phoenix/showcase.ex
defmodule :erl_syntax do
# Types
@type forms :: (syntaxTree() | [syntaxTree()])
@type padding :: (:none | integer())
@type syntaxTree :: (tree() | wrapper() | erl_parse())
@type syntaxTreeAttributes :: attr()
# Private Types
@typep encoding :: (:utf8 | :unicode | :latin1)
@typep erl_parse :: (:erl_parse.abstract_clause() | :erl_parse.abstract_expr() | :erl_parse.abstract_form() | :erl_parse.abstract_type() | :erl_parse.form_info() | {:bin_element, _, _, _, _})
@typep guard :: (:none | syntaxTree() | [syntaxTree()] | [[syntaxTree()]])
# Functions
@spec abstract(term()) :: syntaxTree()
def abstract([h | t] = l) when is_integer(h) do
case is_printable(l) do
true ->
string(l)
false ->
abstract_tail(h, t)
end
end
def abstract([h | t]), do: abstract_tail(h, t)
def abstract(t) when is_atom(t), do: atom(t)
def abstract(t) when is_integer(t), do: integer(t)
def abstract(t) when is_float(t), do: make_float(t)
def abstract([]), do: apply(__MODULE__, nil, [])
def abstract(t) when is_tuple(t), do: tuple(abstract_list(tuple_to_list(t)))
def abstract(t) when is_map(t) do
map_expr((for {key, value} <- :maps.to_list(t) do
map_field_assoc(abstract(key), abstract(value))
end))
end
def abstract(t) when is_binary(t) do
binary((for b <- binary_to_list(t) do
binary_field(integer(b))
end))
end
def abstract(t), do: :erlang.error({:badarg, t})
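# A small usage sketch: abstract/1 lifts a plain term into a syntax tree,
# per the clauses above:
#
#     :erl_syntax.abstract(:ok)      # atom node
#     :erl_syntax.abstract('abc')    # string node (printable charlist)
#     :erl_syntax.abstract({1, 2})   # tuple node of two integer nodes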
@spec add_ann(term(), syntaxTree()) :: syntaxTree()
def add_ann(a, node) do
case node do
tree(attr: attr) ->
tree(node, attr: attr(attr, ann: [a | attr(attr, :ann)]))
wrapper(attr: attr) ->
wrapper(node, attr: attr(attr, ann: [a | attr(attr, :ann)]))
_ ->
add_ann(a, wrap(node))
end
end
@spec add_postcomments([syntaxTree()], syntaxTree()) :: syntaxTree()
def add_postcomments(cs, node) do
case node do
tree(attr: attr) ->
tree(node, attr: add_postcomments_1(cs, attr))
wrapper(attr: attr) ->
wrapper(node, attr: add_postcomments_1(cs, attr))
_ ->
add_postcomments(cs, wrap(node))
end
end
@spec add_precomments([syntaxTree()], syntaxTree()) :: syntaxTree()
def add_precomments(cs, node) do
case node do
tree(attr: attr) ->
tree(node, attr: add_precomments_1(cs, attr))
wrapper(attr: attr) ->
wrapper(node, attr: add_precomments_1(cs, attr))
_ ->
add_precomments(cs, wrap(node))
end
end
@spec annotated_type(syntaxTree(), syntaxTree()) :: syntaxTree()
def annotated_type(name, type), do: tree(:annotated_type, annotated_type(name: name, body: type))
@spec annotated_type_body(syntaxTree()) :: syntaxTree()
def annotated_type_body(node) do
case unwrap(node) do
{:ann_type, _, [_, type]} ->
type
node1 ->
annotated_type(data(node1), :body)
end
end
@spec annotated_type_name(syntaxTree()) :: syntaxTree()
def annotated_type_name(node) do
case unwrap(node) do
{:ann_type, _, [name, _]} ->
name
node1 ->
annotated_type(data(node1), :name)
end
end
@spec application(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def application(operator, arguments), do: tree(:application, application(operator: operator, arguments: arguments))
@spec application((:none | syntaxTree()), syntaxTree(), [syntaxTree()]) :: syntaxTree()
def application(:none, name, arguments), do: application(name, arguments)
def application(module, name, arguments), do: application(module_qualifier(module, name), arguments)
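# Sketch: building the AST for the remote call `lists:reverse([1, 2, 3])`
# via the module-qualified form of application/3:
#
#     :erl_syntax.application(
#       :erl_syntax.atom(:lists),
#       :erl_syntax.atom(:reverse),
#       [:erl_syntax.abstract([1, 2, 3])]
#     )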
@spec application_arguments(syntaxTree()) :: [syntaxTree()]
def application_arguments(node) do
case unwrap(node) do
{:call, _, _, arguments} ->
arguments
node1 ->
application(data(node1), :arguments)
end
end
@spec application_operator(syntaxTree()) :: syntaxTree()
def application_operator(node) do
case unwrap(node) do
{:call, _, operator, _} ->
operator
node1 ->
application(data(node1), :operator)
end
end
@spec arity_qualifier(syntaxTree(), syntaxTree()) :: syntaxTree()
def arity_qualifier(body, arity), do: tree(:arity_qualifier, arity_qualifier(body: body, arity: arity))
@spec arity_qualifier_argument(syntaxTree()) :: syntaxTree()
def arity_qualifier_argument(node), do: arity_qualifier(data(node), :arity)
@spec arity_qualifier_body(syntaxTree()) :: syntaxTree()
def arity_qualifier_body(node), do: arity_qualifier(data(node), :body)
@spec atom((atom() | charlist())) :: syntaxTree()
def atom(name) when is_atom(name), do: tree(:atom, name)
def atom(name), do: tree(:atom, list_to_atom(name))
@spec atom_literal(syntaxTree()) :: charlist()
def atom_literal(node), do: atom_literal(node, :latin1)
def atom_literal(node, :utf8), do: :io_lib.write_atom(atom_value(node))
def atom_literal(node, :unicode), do: :io_lib.write_atom(atom_value(node))
def atom_literal(node, :latin1), do: :io_lib.write_atom_as_latin1(atom_value(node))
@spec atom_name(syntaxTree()) :: charlist()
def atom_name(node), do: atom_to_list(atom_value(node))
@spec atom_value(syntaxTree()) :: atom()
def atom_value(node) do
case unwrap(node) do
{:atom, _, name} ->
name
node1 ->
data(node1)
end
end
@spec attribute(syntaxTree()) :: syntaxTree()
def attribute(name), do: attribute(name, :none)
@spec attribute(syntaxTree(), (:none | [syntaxTree()])) :: syntaxTree()
def attribute(name, args), do: tree(:attribute, attribute(name: name, args: args))
@spec attribute_arguments(syntaxTree()) :: (:none | [syntaxTree()])
def attribute_arguments(node), do: ...
@spec attribute_name(syntaxTree()) :: syntaxTree()
def attribute_name(node) do
case unwrap(node) do
{:attribute, pos, name, _} ->
set_pos(atom(name), pos)
node1 ->
attribute(data(node1), :name)
end
end
@spec binary([syntaxTree()]) :: syntaxTree()
def binary(list), do: tree(:binary, list)
@spec binary_comp(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def binary_comp(template, body), do: tree(:binary_comp, binary_comp(template: template, body: body))
@spec binary_comp_body(syntaxTree()) :: [syntaxTree()]
def binary_comp_body(node) do
case unwrap(node) do
{:bc, _, _, body} ->
body
node1 ->
binary_comp(data(node1), :body)
end
end
@spec binary_comp_template(syntaxTree()) :: syntaxTree()
def binary_comp_template(node) do
case unwrap(node) do
{:bc, _, template, _} ->
template
node1 ->
binary_comp(data(node1), :template)
end
end
@spec binary_field(syntaxTree()) :: syntaxTree()
def binary_field(body), do: binary_field(body, [])
@spec binary_field(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def binary_field(body, types), do: tree(:binary_field, binary_field(body: body, types: types))
@spec binary_field(syntaxTree(), (:none | syntaxTree()), [syntaxTree()]) :: syntaxTree()
def binary_field(body, :none, types), do: binary_field(body, types)
def binary_field(body, size, types), do: binary_field(size_qualifier(body, size), types)
@spec binary_field_body(syntaxTree()) :: syntaxTree()
def binary_field_body(node) do
case unwrap(node) do
{:bin_element, _, body, size, _} ->
cond do
size === :default ->
body
true ->
size_qualifier(body, size)
end
node1 ->
binary_field(data(node1), :body)
end
end
@spec binary_field_size(syntaxTree()) :: (:none | syntaxTree())
def binary_field_size(node) do
case unwrap(node) do
{:bin_element, _, _, size, _} ->
cond do
size === :default ->
:none
true ->
size
end
node1 ->
body = binary_field(data(node1), :body)
case type(body) do
:size_qualifier ->
size_qualifier_argument(body)
_ ->
:none
end
end
end
@spec binary_field_types(syntaxTree()) :: [syntaxTree()]
def binary_field_types(node) do
case unwrap(node) do
{:bin_element, pos, _, _, types} ->
cond do
types === :default ->
[]
true ->
unfold_binary_field_types(types, pos)
end
node1 ->
binary_field(data(node1), :types)
end
end
@spec binary_fields(syntaxTree()) :: [syntaxTree()]
def binary_fields(node) do
case unwrap(node) do
{:bin, _, list} ->
list
node1 ->
data(node1)
end
end
@spec binary_generator(syntaxTree(), syntaxTree()) :: syntaxTree()
def binary_generator(pattern, body), do: tree(:binary_generator, binary_generator(pattern: pattern, body: body))
@spec binary_generator_body(syntaxTree()) :: syntaxTree()
def binary_generator_body(node) do
case unwrap(node) do
{:b_generate, _, _, body} ->
body
node1 ->
binary_generator(data(node1), :body)
end
end
@spec binary_generator_pattern(syntaxTree()) :: syntaxTree()
def binary_generator_pattern(node) do
case unwrap(node) do
{:b_generate, _, pattern, _} ->
pattern
node1 ->
binary_generator(data(node1), :pattern)
end
end
@spec bitstring_type(syntaxTree(), syntaxTree()) :: syntaxTree()
def bitstring_type(m, n), do: tree(:bitstring_type, bitstring_type(m: m, n: n))
@spec bitstring_type_m(syntaxTree()) :: syntaxTree()
def bitstring_type_m(node) do
case unwrap(node) do
{:type, _, :binary, [m, _]} ->
m
node1 ->
bitstring_type(data(node1), :m)
end
end
@spec bitstring_type_n(syntaxTree()) :: syntaxTree()
def bitstring_type_n(node) do
case unwrap(node) do
{:type, _, :binary, [_, n]} ->
n
node1 ->
bitstring_type(data(node1), :n)
end
end
@spec block_expr([syntaxTree()]) :: syntaxTree()
def block_expr(body), do: tree(:block_expr, body)
@spec block_expr_body(syntaxTree()) :: [syntaxTree()]
def block_expr_body(node) do
case unwrap(node) do
{:block, _, body} ->
body
node1 ->
data(node1)
end
end
@spec case_expr(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def case_expr(argument, clauses), do: tree(:case_expr, case_expr(argument: argument, clauses: clauses))
@spec case_expr_argument(syntaxTree()) :: syntaxTree()
def case_expr_argument(node) do
case unwrap(node) do
{:case, _, argument, _} ->
argument
node1 ->
case_expr(data(node1), :argument)
end
end
@spec case_expr_clauses(syntaxTree()) :: [syntaxTree()]
def case_expr_clauses(node) do
case unwrap(node) do
{:case, _, _, clauses} ->
clauses
node1 ->
case_expr(data(node1), :clauses)
end
end
@spec catch_expr(syntaxTree()) :: syntaxTree()
def catch_expr(expr), do: tree(:catch_expr, expr)
@spec catch_expr_body(syntaxTree()) :: syntaxTree()
def catch_expr_body(node) do
case unwrap(node) do
{:catch, _, expr} ->
expr
node1 ->
data(node1)
end
end
@spec char(char()) :: syntaxTree()
def char(char), do: tree(:char, char)
@spec char_literal(syntaxTree()) :: [char(), ...]
def char_literal(node), do: char_literal(node, :latin1)
@spec char_literal(syntaxTree(), encoding()) :: [char(), ...]
def char_literal(node, :unicode), do: :io_lib.write_char(char_value(node))
def char_literal(node, :utf8), do: :io_lib.write_char(char_value(node))
def char_literal(node, :latin1), do: :io_lib.write_char_as_latin1(char_value(node))
@spec char_value(syntaxTree()) :: char()
def char_value(node) do
case unwrap(node) do
{:char, _, char} ->
char
node1 ->
data(node1)
end
end
@spec class_qualifier(syntaxTree(), syntaxTree()) :: syntaxTree()
def class_qualifier(class, body), do: tree(:class_qualifier, class_qualifier(class: class, body: body))
@spec class_qualifier_argument(syntaxTree()) :: syntaxTree()
def class_qualifier_argument(node), do: class_qualifier(data(node), :class)
@spec class_qualifier_body(syntaxTree()) :: syntaxTree()
def class_qualifier_body(node), do: class_qualifier(data(node), :body)
@spec clause(guard(), [syntaxTree()]) :: syntaxTree()
def clause(guard, body), do: clause([], guard, body)
@spec clause([syntaxTree()], guard(), [syntaxTree()]) :: syntaxTree()
def clause(patterns, guard, body) do
guard1 = case guard do
[] ->
:none
[x | _] when is_list(x) ->
disjunction(conjunction_list(guard))
[_ | _] ->
conjunction(guard)
_ ->
guard
end
tree(:clause, clause(patterns: patterns, guard: guard1, body: body))
end
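# Guard shapes accepted by clause/3, per the case above (sketch):
#
#     clause(ps, [], body)            # no guard
#     clause(ps, [g1, g2], body)      # conjunction: g1, g2
#     clause(ps, [[g1], [g2]], body)  # disjunction: g1; g2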
@spec clause_body(syntaxTree()) :: [syntaxTree()]
def clause_body(node) do
case unwrap(node) do
{:clause, _, _, _, body} ->
body
node1 ->
clause(data(node1), :body)
end
end
@spec clause_guard(syntaxTree()) :: (:none | syntaxTree())
def clause_guard(node) do
case unwrap(node) do
{:clause, _, _, guard, _} ->
case guard do
[] ->
:none
[l | _] when is_list(l) ->
disjunction(conjunction_list(guard))
[_ | _] ->
conjunction(guard)
end
node1 ->
clause(data(node1), :guard)
end
end
@spec clause_patterns(syntaxTree()) :: [syntaxTree()]
def clause_patterns(node) do
case unwrap(node) do
{:clause, _, patterns, _, _} ->
patterns
node1 ->
clause(data(node1), :patterns)
end
end
@spec comment([charlist()]) :: syntaxTree()
def comment(strings), do: comment(:none, strings)
@spec comment(padding(), [charlist()]) :: syntaxTree()
def comment(pad, strings), do: tree(:comment, comment(pad: pad, text: strings))
@spec comment_padding(syntaxTree()) :: padding()
def comment_padding(node), do: comment(data(node), :pad)
@spec comment_text(syntaxTree()) :: [charlist()]
def comment_text(node), do: comment(data(node), :text)
@spec compact_list(syntaxTree()) :: syntaxTree()
def compact_list(node) do
case type(node) do
:list ->
case list_suffix(node) do
:none ->
node
tail ->
case type(tail) do
:list ->
tail1 = compact_list(tail)
node1 = list(list_prefix(node) ++ list_prefix(tail1), list_suffix(tail1))
join_comments(tail1, copy_attrs(node, node1))
nil ->
node1 = list(list_prefix(node))
join_comments(tail, copy_attrs(node, node1))
_ ->
node
end
end
_ ->
node
end
end
@spec concrete(syntaxTree()) :: term()
def concrete(node), do: ...
@spec cond_expr([syntaxTree()]) :: syntaxTree()
def cond_expr(clauses), do: tree(:cond_expr, clauses)
@spec cond_expr_clauses(syntaxTree()) :: [syntaxTree()]
def cond_expr_clauses(node) do
case unwrap(node) do
{:cond, _, clauses} ->
clauses
node1 ->
data(node1)
end
end
@spec conjunction([syntaxTree()]) :: syntaxTree()
def conjunction(tests), do: tree(:conjunction, tests)
@spec conjunction_body(syntaxTree()) :: [syntaxTree()]
def conjunction_body(node), do: data(node)
@spec cons(syntaxTree(), syntaxTree()) :: syntaxTree()
def cons(head, tail) do
case type(tail) do
:list ->
copy_comments(tail, list([head | list_prefix(tail)], list_suffix(tail)))
nil ->
copy_comments(tail, list([head]))
_ ->
list([head], tail)
end
end
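# Sketch: prepending an element to a list skeleton yields a longer
# proper-list skeleton:
#
#     :erl_syntax.cons(
#       :erl_syntax.integer(1),
#       :erl_syntax.abstract([2, 3])
#     )
#     # => list skeleton equivalent to [1, 2, 3]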
@spec constrained_function_type(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def constrained_function_type(functionType, functionConstraint) do
conj = conjunction(functionConstraint)
tree(:constrained_function_type, constrained_function_type(body: functionType, argument: conj))
end
@spec constrained_function_type_argument(syntaxTree()) :: syntaxTree()
def constrained_function_type_argument(node) do
case unwrap(node) do
{:type, _, :bounded_fun, [_, functionConstraint]} ->
conjunction(functionConstraint)
node1 ->
constrained_function_type(data(node1), :argument)
end
end
@spec constrained_function_type_body(syntaxTree()) :: syntaxTree()
def constrained_function_type_body(node) do
case unwrap(node) do
{:type, _, :bounded_fun, [functionType, _]} ->
functionType
node1 ->
constrained_function_type(data(node1), :body)
end
end
@spec constraint(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def constraint(name, types), do: tree(:constraint, constraint(name: name, types: types))
@spec constraint_argument(syntaxTree()) :: syntaxTree()
def constraint_argument(node) do
case unwrap(node) do
{:type, _, :constraint, [name, _]} ->
name
node1 ->
constraint(data(node1), :name)
end
end
@spec constraint_body(syntaxTree()) :: [syntaxTree()]
def constraint_body(node) do
case unwrap(node) do
{:type, _, :constraint, [_, types]} ->
types
node1 ->
constraint(data(node1), :types)
end
end
@spec copy_ann(syntaxTree(), syntaxTree()) :: syntaxTree()
def copy_ann(source, target), do: set_ann(target, get_ann(source))
@spec copy_attrs(syntaxTree(), syntaxTree()) :: syntaxTree()
def copy_attrs(s, t), do: set_attrs(t, get_attrs(s))
@spec copy_comments(syntaxTree(), syntaxTree()) :: syntaxTree()
def copy_comments(source, target), do: set_com(target, get_com(source))
@spec copy_pos(syntaxTree(), syntaxTree()) :: syntaxTree()
def copy_pos(source, target), do: set_pos(target, get_pos(source))
@spec data(syntaxTree()) :: term()
def data(tree(data: d)), do: d
def data(t), do: :erlang.error({:badarg, t})
@spec disjunction([syntaxTree()]) :: syntaxTree()
def disjunction(tests), do: tree(:disjunction, tests)
@spec disjunction_body(syntaxTree()) :: [syntaxTree()]
def disjunction_body(node), do: data(node)
@spec eof_marker() :: syntaxTree()
def eof_marker(), do: tree(:eof_marker)
@spec error_marker(term()) :: syntaxTree()
def error_marker(error), do: tree(:error_marker, error)
@spec error_marker_info(syntaxTree()) :: term()
def error_marker_info(node) do
case unwrap(node) do
{:error, error} ->
error
t ->
data(t)
end
end
@spec flatten_form_list(syntaxTree()) :: syntaxTree()
def flatten_form_list(node) do
fs = form_list_elements(node)
fs1 = :lists.reverse(flatten_form_list_1(fs, []))
copy_attrs(node, form_list(fs1))
end
@spec float(float()) :: syntaxTree()
def float(value), do: make_float(value)
@spec float_literal(syntaxTree()) :: charlist()
def float_literal(node), do: float_to_list(float_value(node))
@spec float_value(syntaxTree()) :: float()
def float_value(node) do
case unwrap(node) do
{:float, _, value} ->
value
node1 ->
data(node1)
end
end
@spec form_list([syntaxTree()]) :: syntaxTree()
def form_list(forms), do: tree(:form_list, forms)
@spec form_list_elements(syntaxTree()) :: [syntaxTree()]
def form_list_elements(node), do: data(node)
@spec fun_expr([syntaxTree()]) :: syntaxTree()
def fun_expr(clauses), do: tree(:fun_expr, clauses)
@spec fun_expr_arity(syntaxTree()) :: arity()
def fun_expr_arity(node), do: length(clause_patterns(hd(fun_expr_clauses(node))))
@spec fun_expr_clauses(syntaxTree()) :: [syntaxTree()]
def fun_expr_clauses(node) do
case unwrap(node) do
{:fun, _, {:clauses, clauses}} ->
clauses
node1 ->
data(node1)
end
end
@spec fun_type() :: syntaxTree()
def fun_type(), do: tree(:fun_type)
@spec function(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def function(name, clauses), do: tree(:function, func(name: name, clauses: clauses))
@spec function_arity(syntaxTree()) :: arity()
def function_arity(node), do: length(clause_patterns(hd(function_clauses(node))))
@spec function_clauses(syntaxTree()) :: [syntaxTree()]
def function_clauses(node) do
case unwrap(node) do
{:function, _, _, _, clauses} ->
clauses
node1 ->
func(data(node1), :clauses)
end
end
@spec function_name(syntaxTree()) :: syntaxTree()
def function_name(node) do
case unwrap(node) do
{:function, pos, name, _, _} ->
set_pos(atom(name), pos)
node1 ->
func(data(node1), :name)
end
end
def function_type(type), do: function_type(:any_arity, type)
@spec function_type((:any_arity | syntaxTree()), syntaxTree()) :: syntaxTree()
def function_type(arguments, return), do: tree(:function_type, function_type(arguments: arguments, return: return))
@spec function_type_arguments(syntaxTree()) :: (:any_arity | [syntaxTree()])
def function_type_arguments(node) do
case unwrap(node) do
{:type, _, :fun, [{:type, _, :any}, _]} ->
:any_arity
{:type, _, :fun, [{:type, _, :product, arguments}, _]} ->
arguments
node1 ->
function_type(data(node1), :arguments)
end
end
@spec function_type_return(syntaxTree()) :: syntaxTree()
def function_type_return(node) do
case unwrap(node) do
{:type, _, :fun, [_, type]} ->
type
node1 ->
function_type(data(node1), :return)
end
end
@spec generator(syntaxTree(), syntaxTree()) :: syntaxTree()
def generator(pattern, body), do: tree(:generator, generator(pattern: pattern, body: body))
@spec generator_body(syntaxTree()) :: syntaxTree()
def generator_body(node) do
case unwrap(node) do
{:generate, _, _, body} ->
body
node1 ->
generator(data(node1), :body)
end
end
@spec generator_pattern(syntaxTree()) :: syntaxTree()
def generator_pattern(node) do
case unwrap(node) do
{:generate, _, pattern, _} ->
pattern
node1 ->
generator(data(node1), :pattern)
end
end
@spec get_ann(syntaxTree()) :: [term()]
def get_ann(tree(attr: attr)), do: attr(attr, :ann)
def get_ann(wrapper(attr: attr)), do: attr(attr, :ann)
def get_ann(_), do: []
@spec get_attrs(syntaxTree()) :: syntaxTreeAttributes()
def get_attrs(tree(attr: attr)), do: attr
def get_attrs(wrapper(attr: attr)), do: attr
def get_attrs(node), do: attr(pos: get_pos(node), ann: get_ann(node), com: get_com(node))
@spec get_pos(syntaxTree()) :: term()
def get_pos(tree(attr: attr)), do: attr(attr, :pos)
def get_pos(wrapper(attr: attr)), do: attr(attr, :pos)
def get_pos({:error, {pos, _, _}}), do: pos
def get_pos({:warning, {pos, _, _}}), do: pos
def get_pos(node), do: element(2, node)
@spec get_postcomments(syntaxTree()) :: [syntaxTree()]
def get_postcomments(tree(attr: attr)), do: get_postcomments_1(attr)
def get_postcomments(wrapper(attr: attr)), do: get_postcomments_1(attr)
def get_postcomments(_), do: []
@spec get_precomments(syntaxTree()) :: [syntaxTree()]
def get_precomments(tree(attr: attr)), do: get_precomments_1(attr)
def get_precomments(wrapper(attr: attr)), do: get_precomments_1(attr)
def get_precomments(_), do: []
@spec has_comments(syntaxTree()) :: boolean()
def has_comments(tree(attr: attr)) do
case attr(attr, :com) do
:none ->
false
com(pre: [], post: []) ->
false
_ ->
true
end
end
def has_comments(wrapper(attr: attr)) do
case attr(attr, :com) do
:none ->
false
com(pre: [], post: []) ->
false
_ ->
true
end
end
def has_comments(_), do: false
@spec if_expr([syntaxTree()]) :: syntaxTree()
def if_expr(clauses), do: tree(:if_expr, clauses)
@spec if_expr_clauses(syntaxTree()) :: [syntaxTree()]
def if_expr_clauses(node) do
case unwrap(node) do
{:if, _, clauses} ->
clauses
node1 ->
data(node1)
end
end
@spec implicit_fun(syntaxTree()) :: syntaxTree()
def implicit_fun(name), do: tree(:implicit_fun, name)
@spec implicit_fun(syntaxTree(), (:none | syntaxTree())) :: syntaxTree()
def implicit_fun(name, :none), do: implicit_fun(name)
def implicit_fun(name, arity), do: implicit_fun(arity_qualifier(name, arity))
@spec implicit_fun((:none | syntaxTree()), syntaxTree(), syntaxTree()) :: syntaxTree()
def implicit_fun(:none, name, arity), do: implicit_fun(name, arity)
def implicit_fun(module, name, arity), do: implicit_fun(module_qualifier(module, arity_qualifier(name, arity)))
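# Sketch: the AST for the implicit fun `fun lists:reverse/1`, built from
# the module-qualified form of implicit_fun/3:
#
#     :erl_syntax.implicit_fun(
#       :erl_syntax.atom(:lists),
#       :erl_syntax.atom(:reverse),
#       :erl_syntax.integer(1)
#     )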
@spec implicit_fun_name(syntaxTree()) :: syntaxTree()
def implicit_fun_name(node) do
case unwrap(node) do
{:fun, pos, {:function, atom, arity}} ->
arity_qualifier(set_pos(atom(atom), pos), set_pos(integer(arity), pos))
{:fun, pos, {:function, module, atom, arity}} when is_atom(module) and is_atom(atom) and is_integer(arity) ->
module_qualifier(set_pos(atom(module), pos), arity_qualifier(set_pos(atom(atom), pos), set_pos(integer(arity), pos)))
{:fun, _Pos, {:function, module, atom, arity}} ->
module_qualifier(module, arity_qualifier(atom, arity))
node1 ->
data(node1)
end
end
@spec infix_expr(syntaxTree(), syntaxTree(), syntaxTree()) :: syntaxTree()
def infix_expr(left, operator, right), do: tree(:infix_expr, infix_expr(operator: operator, left: left, right: right))
@spec infix_expr_left(syntaxTree()) :: syntaxTree()
def infix_expr_left(node) do
case unwrap(node) do
{:op, _, _, left, _} ->
left
node1 ->
infix_expr(data(node1), :left)
end
end
@spec infix_expr_operator(syntaxTree()) :: syntaxTree()
def infix_expr_operator(node) do
case unwrap(node) do
{:op, pos, operator, _, _} ->
set_pos(operator(operator), pos)
node1 ->
infix_expr(data(node1), :operator)
end
end
@spec infix_expr_right(syntaxTree()) :: syntaxTree()
def infix_expr_right(node) do
case unwrap(node) do
{:op, _, _, _, right} ->
right
node1 ->
infix_expr(data(node1), :right)
end
end
@spec integer(integer()) :: syntaxTree()
def integer(value), do: tree(:integer, value)
@spec integer_literal(syntaxTree()) :: charlist()
def integer_literal(node), do: integer_to_list(integer_value(node))
@spec integer_range_type(syntaxTree(), syntaxTree()) :: syntaxTree()
def integer_range_type(low, high), do: tree(:integer_range_type, integer_range_type(low: low, high: high))
@spec integer_range_type_high(syntaxTree()) :: syntaxTree()
def integer_range_type_high(node) do
case unwrap(node) do
{:type, _, :range, [_, high]} ->
high
node1 ->
integer_range_type(data(node1), :high)
end
end
@spec integer_range_type_low(syntaxTree()) :: syntaxTree()
def integer_range_type_low(node) do
case unwrap(node) do
{:type, _, :range, [low, _]} ->
low
node1 ->
integer_range_type(data(node1), :low)
end
end
@spec integer_value(syntaxTree()) :: integer()
def integer_value(node) do
case unwrap(node) do
{:integer, _, value} ->
value
node1 ->
data(node1)
end
end
@spec is_atom(syntaxTree(), atom()) :: boolean()
def is_atom(node, value) do
case unwrap(node) do
{:atom, _, value} ->
true
tree(type: :atom, data: value) ->
true
_ ->
false
end
end
@spec is_char(syntaxTree(), char()) :: boolean()
def is_char(node, value) do
case unwrap(node) do
{:char, _, value} ->
true
tree(type: :char, data: value) ->
true
_ ->
false
end
end
@spec is_form(syntaxTree()) :: boolean()
def is_form(node) do
case type(node) do
:attribute ->
true
:comment ->
true
:function ->
true
:eof_marker ->
true
:error_marker ->
true
:form_list ->
true
:warning_marker ->
true
:text ->
true
_ ->
false
end
end
@spec is_integer(syntaxTree(), integer()) :: boolean()
def is_integer(node, value) do
case unwrap(node) do
{:integer, _, value} ->
true
tree(type: :integer, data: value) ->
true
_ ->
false
end
end
@spec is_leaf(syntaxTree()) :: boolean()
def is_leaf(node) do
case type(node) do
:atom ->
true
:char ->
true
:comment ->
true
:eof_marker ->
true
:error_marker ->
true
:float ->
true
:fun_type ->
true
:integer ->
true
nil ->
true
:operator ->
true
:string ->
true
:text ->
true
:map_expr ->
map_expr_fields(node) === [] and map_expr_argument(node) === :none
:map_type ->
map_type_fields(node) === :any_size
:tuple ->
tuple_elements(node) === []
:tuple_type ->
tuple_type_elements(node) === :any_size
:underscore ->
true
:variable ->
true
:warning_marker ->
true
_ ->
false
end
end
@spec is_list_skeleton(syntaxTree()) :: boolean()
def is_list_skeleton(node) do
case type(node) do
:list ->
true
nil ->
true
_ ->
false
end
end
@spec is_literal(syntaxTree()) :: boolean()
def is_literal(t) do
case type(t) do
:atom ->
true
:integer ->
true
:float ->
true
:char ->
true
:string ->
true
nil ->
true
:list ->
is_literal(list_head(t)) and is_literal(list_tail(t))
:tuple ->
:lists.all(&is_literal/1, tuple_elements(t))
:map_expr ->
(case map_expr_argument(t) do
:none ->
true
arg ->
is_literal(arg)
end) and :lists.all(&is_literal_map_field/1, map_expr_fields(t))
:binary ->
:lists.all(&is_literal_binary_field/1, binary_fields(t))
_ ->
false
end
end
@spec is_proper_list(syntaxTree()) :: boolean()
def is_proper_list(node) do
case type(node) do
:list ->
case list_suffix(node) do
:none ->
true
tail ->
is_proper_list(tail)
end
nil ->
true
_ ->
false
end
end
@spec is_string(syntaxTree(), charlist()) :: boolean()
def is_string(node, value) do
case unwrap(node) do
{:string, _, value} ->
true
tree(type: :string, data: value) ->
true
_ ->
false
end
end
@spec is_tree(syntaxTree()) :: boolean()
def is_tree(tree()), do: true
def is_tree(_), do: false
@spec join_comments(syntaxTree(), syntaxTree()) :: syntaxTree()
def join_comments(source, target), do: add_postcomments(get_postcomments(source), add_precomments(get_precomments(source), target))
@spec list([syntaxTree()]) :: syntaxTree()
def list(list), do: list(list, :none)
@spec list([syntaxTree()], (:none | syntaxTree())) :: syntaxTree()
def list([], :none), do: apply(__MODULE__, nil, [])
def list(elements, tail) when elements !== [], do: tree(:list, list(prefix: elements, suffix: tail))
@spec list_comp(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def list_comp(template, body), do: tree(:list_comp, list_comp(template: template, body: body))
@spec list_comp_body(syntaxTree()) :: [syntaxTree()]
def list_comp_body(node) do
case unwrap(node) do
{:lc, _, _, body} ->
body
node1 ->
list_comp(data(node1), :body)
end
end
@spec list_comp_template(syntaxTree()) :: syntaxTree()
def list_comp_template(node) do
case unwrap(node) do
{:lc, _, template, _} ->
template
node1 ->
list_comp(data(node1), :template)
end
end
@spec list_elements(syntaxTree()) :: [syntaxTree()]
def list_elements(node), do: :lists.reverse(list_elements(node, []))
@spec list_head(syntaxTree()) :: syntaxTree()
def list_head(node), do: hd(list_prefix(node))
@spec list_length(syntaxTree()) :: non_neg_integer()
def list_length(node), do: list_length(node, 0)
@spec list_prefix(syntaxTree()) :: [syntaxTree()]
def list_prefix(node) do
case unwrap(node) do
{:cons, _, head, tail} ->
[head | cons_prefix(tail)]
node1 ->
list(data(node1), :prefix)
end
end
@spec list_suffix(syntaxTree()) :: (:none | syntaxTree())
def list_suffix(node) do
case unwrap(node) do
{:cons, _, _, tail} ->
case cons_suffix(tail) do
{nil, _} ->
:none
tail1 ->
tail1
end
node1 ->
list(data(node1), :suffix)
end
end
@spec list_tail(syntaxTree()) :: syntaxTree()
def list_tail(node) do
tail = list_suffix(node)
case tl(list_prefix(node)) do
[] ->
cond do
tail === :none ->
apply(__MODULE__, nil, [])
true ->
tail
end
es ->
list(es, tail)
end
end
@spec macro(syntaxTree()) :: syntaxTree()
def macro(name), do: macro(name, :none)
@spec macro(syntaxTree(), (:none | [syntaxTree()])) :: syntaxTree()
def macro(name, arguments), do: tree(:macro, macro(name: name, arguments: arguments))
@spec macro_arguments(syntaxTree()) :: (:none | [syntaxTree()])
def macro_arguments(node), do: macro(data(node), :arguments)
@spec macro_name(syntaxTree()) :: syntaxTree()
def macro_name(node), do: macro(data(node), :name)
@spec make_tree(atom(), [[syntaxTree()]]) :: syntaxTree()
def make_tree(:annotated_type, [[n], [t]]), do: annotated_type(n, t)
def make_tree(:application, [[f], a]), do: application(f, a)
def make_tree(:arity_qualifier, [[n], [a]]), do: arity_qualifier(n, a)
def make_tree(:attribute, [[n]]), do: attribute(n)
def make_tree(:attribute, [[n], a]), do: attribute(n, a)
def make_tree(:binary, [fs]), do: binary(fs)
def make_tree(:binary_comp, [[t], b]), do: binary_comp(t, b)
def make_tree(:binary_field, [[b]]), do: binary_field(b)
def make_tree(:binary_field, [[b], ts]), do: binary_field(b, ts)
def make_tree(:binary_generator, [[p], [e]]), do: binary_generator(p, e)
def make_tree(:bitstring_type, [[m], [n]]), do: bitstring_type(m, n)
def make_tree(:block_expr, [b]), do: block_expr(b)
def make_tree(:case_expr, [[a], c]), do: case_expr(a, c)
def make_tree(:catch_expr, [[b]]), do: catch_expr(b)
def make_tree(:class_qualifier, [[a], [b]]), do: class_qualifier(a, b)
def make_tree(:clause, [p, b]), do: clause(p, :none, b)
def make_tree(:clause, [p, [g], b]), do: clause(p, g, b)
def make_tree(:cond_expr, [c]), do: cond_expr(c)
def make_tree(:conjunction, [e]), do: conjunction(e)
def make_tree(:constrained_function_type, [[f], c]), do: constrained_function_type(f, c)
def make_tree(:constraint, [[n], ts]), do: constraint(n, ts)
def make_tree(:disjunction, [e]), do: disjunction(e)
def make_tree(:form_list, [e]), do: form_list(e)
def make_tree(:fun_expr, [c]), do: fun_expr(c)
def make_tree(:function, [[n], c]), do: function(n, c)
def make_tree(:function_type, [[t]]), do: function_type(t)
def make_tree(:function_type, [a, [t]]), do: function_type(a, t)
def make_tree(:generator, [[p], [e]]), do: generator(p, e)
def make_tree(:if_expr, [c]), do: if_expr(c)
def make_tree(:implicit_fun, [[n]]), do: implicit_fun(n)
def make_tree(:infix_expr, [[l], [f], [r]]), do: infix_expr(l, f, r)
def make_tree(:integer_range_type, [[l], [h]]), do: integer_range_type(l, h)
def make_tree(:list, [p]), do: list(p)
def make_tree(:list, [p, [s]]), do: list(p, s)
def make_tree(:list_comp, [[t], b]), do: list_comp(t, b)
def make_tree(:macro, [[n]]), do: macro(n)
def make_tree(:macro, [[n], a]), do: macro(n, a)
def make_tree(:map_expr, [fs]), do: map_expr(fs)
def make_tree(:map_expr, [[e], fs]), do: map_expr(e, fs)
def make_tree(:map_field_assoc, [[k], [v]]), do: map_field_assoc(k, v)
def make_tree(:map_field_exact, [[k], [v]]), do: map_field_exact(k, v)
def make_tree(:map_type, [fs]), do: map_type(fs)
def make_tree(:map_type_assoc, [[n], [v]]), do: map_type_assoc(n, v)
def make_tree(:map_type_exact, [[n], [v]]), do: map_type_exact(n, v)
def make_tree(:match_expr, [[p], [e]]), do: match_expr(p, e)
def make_tree(:named_fun_expr, [[n], c]), do: named_fun_expr(n, c)
def make_tree(:module_qualifier, [[m], [n]]), do: module_qualifier(m, n)
def make_tree(:parentheses, [[e]]), do: parentheses(e)
def make_tree(:prefix_expr, [[f], [a]]), do: prefix_expr(f, a)
def make_tree(:receive_expr, [c]), do: receive_expr(c)
def make_tree(:receive_expr, [c, [e], a]), do: receive_expr(c, e, a)
def make_tree(:record_access, [[e], [t], [f]]), do: record_access(e, t, f)
def make_tree(:record_expr, [[t], f]), do: record_expr(t, f)
def make_tree(:record_expr, [[e], [t], f]), do: record_expr(e, t, f)
def make_tree(:record_field, [[n]]), do: record_field(n)
def make_tree(:record_field, [[n], [e]]), do: record_field(n, e)
def make_tree(:record_index_expr, [[t], [f]]), do: record_index_expr(t, f)
def make_tree(:record_type, [[n], fs]), do: record_type(n, fs)
def make_tree(:record_type_field, [[n], [t]]), do: record_type_field(n, t)
def make_tree(:size_qualifier, [[n], [a]]), do: size_qualifier(n, a)
def make_tree(:try_expr, [b, c, h, a]), do: try_expr(b, c, h, a)
def make_tree(:tuple, [e]), do: tuple(e)
def make_tree(:tuple_type, [es]), do: tuple_type(es)
def make_tree(:type_application, [[n], ts]), do: type_application(n, ts)
def make_tree(:type_union, [es]), do: type_union(es)
def make_tree(:typed_record_field, [[f], [t]]), do: typed_record_field(f, t)
def make_tree(:user_type_application, [[n], ts]), do: user_type_application(n, ts)
@spec map_expr([syntaxTree()]) :: syntaxTree()
def map_expr(fields), do: map_expr(:none, fields)
@spec map_expr((:none | syntaxTree()), [syntaxTree()]) :: syntaxTree()
def map_expr(argument, fields), do: tree(:map_expr, map_expr(argument: argument, fields: fields))
@spec map_expr_argument(syntaxTree()) :: (:none | syntaxTree())
def map_expr_argument(node) do
case unwrap(node) do
{:map, _, _} ->
:none
{:map, _, argument, _} ->
argument
node1 ->
map_expr(data(node1), :argument)
end
end
@spec map_expr_fields(syntaxTree()) :: [syntaxTree()]
def map_expr_fields(node) do
case unwrap(node) do
{:map, _, fields} ->
fields
{:map, _, _, fields} ->
fields
node1 ->
map_expr(data(node1), :fields)
end
end
@spec map_field_assoc(syntaxTree(), syntaxTree()) :: syntaxTree()
def map_field_assoc(name, value), do: tree(:map_field_assoc, map_field_assoc(name: name, value: value))
@spec map_field_assoc_name(syntaxTree()) :: syntaxTree()
def map_field_assoc_name(node) do
case node do
{:map_field_assoc, _, name, _} ->
name
_ ->
map_field_assoc(data(node), :name)
end
end
@spec map_field_assoc_value(syntaxTree()) :: syntaxTree()
def map_field_assoc_value(node) do
case node do
{:map_field_assoc, _, _, value} ->
value
_ ->
map_field_assoc(data(node), :value)
end
end
@spec map_field_exact(syntaxTree(), syntaxTree()) :: syntaxTree()
def map_field_exact(name, value), do: tree(:map_field_exact, map_field_exact(name: name, value: value))
@spec map_field_exact_name(syntaxTree()) :: syntaxTree()
def map_field_exact_name(node) do
case node do
{:map_field_exact, _, name, _} ->
name
_ ->
map_field_exact(data(node), :name)
end
end
@spec map_field_exact_value(syntaxTree()) :: syntaxTree()
def map_field_exact_value(node) do
case node do
{:map_field_exact, _, _, value} ->
value
_ ->
map_field_exact(data(node), :value)
end
end
def map_type(), do: map_type(:any_size)
@spec map_type((:any_size | [syntaxTree()])) :: syntaxTree()
def map_type(fields), do: tree(:map_type, fields)
@spec map_type_assoc(syntaxTree(), syntaxTree()) :: syntaxTree()
def map_type_assoc(name, value), do: tree(:map_type_assoc, map_type_assoc(name: name, value: value))
@spec map_type_assoc_name(syntaxTree()) :: syntaxTree()
def map_type_assoc_name(node) do
case node do
{:type, _, :map_field_assoc, [name, _]} ->
name
_ ->
map_type_assoc(data(node), :name)
end
end
@spec map_type_assoc_value(syntaxTree()) :: syntaxTree()
def map_type_assoc_value(node) do
case node do
{:type, _, :map_field_assoc, [_, value]} ->
value
_ ->
map_type_assoc(data(node), :value)
end
end
@spec map_type_exact(syntaxTree(), syntaxTree()) :: syntaxTree()
def map_type_exact(name, value), do: tree(:map_type_exact, map_type_exact(name: name, value: value))
@spec map_type_exact_name(syntaxTree()) :: syntaxTree()
def map_type_exact_name(node) do
case node do
{:type, _, :map_field_exact, [name, _]} ->
name
_ ->
map_type_exact(data(node), :name)
end
end
@spec map_type_exact_value(syntaxTree()) :: syntaxTree()
def map_type_exact_value(node) do
case node do
{:type, _, :map_field_exact, [_, value]} ->
value
_ ->
map_type_exact(data(node), :value)
end
end
@spec map_type_fields(syntaxTree()) :: (:any_size | [syntaxTree()])
def map_type_fields(node) do
case unwrap(node) do
{:type, _, :map, fields} when is_list(fields) ->
fields
{:type, _, :map, :any} ->
:any_size
node1 ->
data(node1)
end
end
@spec match_expr(syntaxTree(), syntaxTree()) :: syntaxTree()
def match_expr(pattern, body), do: tree(:match_expr, match_expr(pattern: pattern, body: body))
@spec match_expr_body(syntaxTree()) :: syntaxTree()
def match_expr_body(node) do
case unwrap(node) do
{:match, _, _, body} ->
body
node1 ->
match_expr(data(node1), :body)
end
end
@spec match_expr_pattern(syntaxTree()) :: syntaxTree()
def match_expr_pattern(node) do
case unwrap(node) do
{:match, _, pattern, _} ->
pattern
node1 ->
match_expr(data(node1), :pattern)
end
end
@spec meta(syntaxTree()) :: syntaxTree()
def meta(t) do
case type(t) do
:variable ->
case :lists.member(:meta_var, get_ann(t)) do
false ->
meta_precomment(t)
true ->
set_ann(t, :lists.delete(:meta_var, get_ann(t)))
end
_ ->
case has_comments(t) do
true ->
meta_precomment(t)
false ->
meta_1(t)
end
end
end
def module_info() do
# body not decompiled
end
def module_info(p0) do
# body not decompiled
end
@spec module_qualifier(syntaxTree(), syntaxTree()) :: syntaxTree()
def module_qualifier(module, body), do: tree(:module_qualifier, module_qualifier(module: module, body: body))
@spec module_qualifier_argument(syntaxTree()) :: syntaxTree()
def module_qualifier_argument(node) do
case unwrap(node) do
{:remote, _, module, _} ->
module
node1 ->
module_qualifier(data(node1), :module)
end
end
@spec module_qualifier_body(syntaxTree()) :: syntaxTree()
def module_qualifier_body(node) do
case unwrap(node) do
{:remote, _, _, body} ->
body
node1 ->
module_qualifier(data(node1), :body)
end
end
@spec named_fun_expr(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def named_fun_expr(name, clauses), do: tree(:named_fun_expr, named_fun_expr(name: name, clauses: clauses))
@spec named_fun_expr_arity(syntaxTree()) :: arity()
def named_fun_expr_arity(node), do: length(clause_patterns(hd(named_fun_expr_clauses(node))))
@spec named_fun_expr_clauses(syntaxTree()) :: [syntaxTree()]
def named_fun_expr_clauses(node) do
case unwrap(node) do
{:named_fun, _, _, clauses} ->
clauses
node1 ->
named_fun_expr(data(node1), :clauses)
end
end
@spec named_fun_expr_name(syntaxTree()) :: syntaxTree()
def named_fun_expr_name(node) do
case unwrap(node) do
{:named_fun, pos, name, _} ->
set_pos(variable(name), pos)
node1 ->
named_fun_expr(data(node1), :name)
end
end
@spec unquote(:nil)() :: syntaxTree()
def unquote(:nil)(), do: tree(nil)
@spec normalize_list(syntaxTree()) :: syntaxTree()
def normalize_list(node) do
case type(node) do
:list ->
p = list_prefix(node)
case list_suffix(node) do
:none ->
copy_attrs(node, normalize_list_1(p, apply(__MODULE__, nil, [])))
tail ->
tail1 = normalize_list(tail)
copy_attrs(node, normalize_list_1(p, tail1))
end
_ ->
node
end
end
@spec operator((atom() | charlist())) :: syntaxTree()
def operator(name) when is_atom(name), do: tree(:operator, name)
def operator(name), do: tree(:operator, :erlang.list_to_atom(name))
@spec operator_literal(syntaxTree()) :: charlist()
def operator_literal(node), do: :erlang.atom_to_list(operator_name(node))
@spec operator_name(syntaxTree()) :: atom()
def operator_name(node), do: data(node)
@spec parentheses(syntaxTree()) :: syntaxTree()
def parentheses(expr), do: tree(:parentheses, expr)
@spec parentheses_body(syntaxTree()) :: syntaxTree()
def parentheses_body(node), do: data(node)
@spec prefix_expr(syntaxTree(), syntaxTree()) :: syntaxTree()
def prefix_expr(operator, argument), do: tree(:prefix_expr, prefix_expr(operator: operator, argument: argument))
@spec prefix_expr_argument(syntaxTree()) :: syntaxTree()
def prefix_expr_argument(node) do
case unwrap(node) do
{:op, _, _, argument} ->
argument
node1 ->
prefix_expr(data(node1), :argument)
end
end
@spec prefix_expr_operator(syntaxTree()) :: syntaxTree()
def prefix_expr_operator(node) do
case unwrap(node) do
{:op, pos, operator, _} ->
set_pos(operator(operator), pos)
node1 ->
prefix_expr(data(node1), :operator)
end
end
@spec receive_expr([syntaxTree()]) :: syntaxTree()
def receive_expr(clauses), do: receive_expr(clauses, :none, [])
@spec receive_expr([syntaxTree()], (:none | syntaxTree()), [syntaxTree()]) :: syntaxTree()
def receive_expr(clauses, timeout, action) do
action1 = case timeout do
:none ->
[]
_ ->
action
end
tree(:receive_expr, receive_expr(clauses: clauses, timeout: timeout, action: action1))
end
@spec receive_expr_action(syntaxTree()) :: [syntaxTree()]
def receive_expr_action(node) do
case unwrap(node) do
{:receive, _, _} ->
[]
{:receive, _, _, _, action} ->
action
node1 ->
receive_expr(data(node1), :action)
end
end
@spec receive_expr_clauses(syntaxTree()) :: [syntaxTree()]
def receive_expr_clauses(node) do
case unwrap(node) do
{:receive, _, clauses} ->
clauses
{:receive, _, clauses, _, _} ->
clauses
node1 ->
receive_expr(data(node1), :clauses)
end
end
@spec receive_expr_timeout(syntaxTree()) :: (:none | syntaxTree())
def receive_expr_timeout(node) do
case unwrap(node) do
{:receive, _, _} ->
:none
{:receive, _, _, timeout, _} ->
timeout
node1 ->
receive_expr(data(node1), :timeout)
end
end
@spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) :: syntaxTree()
def record_access(argument, type, field), do: tree(:record_access, record_access(argument: argument, type: type, field: field))
@spec record_access_argument(syntaxTree()) :: syntaxTree()
def record_access_argument(node) do
case unwrap(node) do
{:record_field, _, argument, _, _} ->
argument
node1 ->
record_access(data(node1), :argument)
end
end
@spec record_access_field(syntaxTree()) :: syntaxTree()
def record_access_field(node) do
case unwrap(node) do
{:record_field, _, _, _, field} ->
field
node1 ->
record_access(data(node1), :field)
end
end
@spec record_access_type(syntaxTree()) :: syntaxTree()
def record_access_type(node) do
case unwrap(node) do
{:record_field, pos, _, type, _} ->
set_pos(atom(type), pos)
node1 ->
record_access(data(node1), :type)
end
end
@spec record_expr(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def record_expr(type, fields), do: record_expr(:none, type, fields)
@spec record_expr((:none | syntaxTree()), syntaxTree(), [syntaxTree()]) :: syntaxTree()
def record_expr(argument, type, fields), do: tree(:record_expr, record_expr(argument: argument, type: type, fields: fields))
@spec record_expr_argument(syntaxTree()) :: (:none | syntaxTree())
def record_expr_argument(node) do
case unwrap(node) do
{:record, _, _, _} ->
:none
{:record, _, argument, _, _} ->
argument
node1 ->
record_expr(data(node1), :argument)
end
end
@spec record_expr_fields(syntaxTree()) :: [syntaxTree()]
def record_expr_fields(node) do
case unwrap(node) do
{:record, _, _, fields} ->
unfold_record_fields(fields)
{:record, _, _, _, fields} ->
unfold_record_fields(fields)
node1 ->
record_expr(data(node1), :fields)
end
end
@spec record_expr_type(syntaxTree()) :: syntaxTree()
def record_expr_type(node) do
case unwrap(node) do
{:record, pos, type, _} ->
set_pos(atom(type), pos)
{:record, pos, _, type, _} ->
set_pos(atom(type), pos)
node1 ->
record_expr(data(node1), :type)
end
end
@spec record_field(syntaxTree()) :: syntaxTree()
def record_field(name), do: record_field(name, :none)
@spec record_field(syntaxTree(), (:none | syntaxTree())) :: syntaxTree()
def record_field(name, value), do: tree(:record_field, record_field(name: name, value: value))
@spec record_field_name(syntaxTree()) :: syntaxTree()
def record_field_name(node), do: record_field(data(node), :name)
@spec record_field_value(syntaxTree()) :: (:none | syntaxTree())
def record_field_value(node), do: record_field(data(node), :value)
@spec record_index_expr(syntaxTree(), syntaxTree()) :: syntaxTree()
def record_index_expr(type, field), do: tree(:record_index_expr, record_index_expr(type: type, field: field))
@spec record_index_expr_field(syntaxTree()) :: syntaxTree()
def record_index_expr_field(node) do
case unwrap(node) do
{:record_index, _, _, field} ->
field
node1 ->
record_index_expr(data(node1), :field)
end
end
@spec record_index_expr_type(syntaxTree()) :: syntaxTree()
def record_index_expr_type(node) do
case unwrap(node) do
{:record_index, pos, type, _} ->
set_pos(atom(type), pos)
node1 ->
record_index_expr(data(node1), :type)
end
end
@spec record_type(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def record_type(name, fields), do: tree(:record_type, record_type(name: name, fields: fields))
@spec record_type_field(syntaxTree(), syntaxTree()) :: syntaxTree()
def record_type_field(name, type), do: tree(:record_type_field, record_type_field(name: name, type: type))
@spec record_type_field_name(syntaxTree()) :: syntaxTree()
def record_type_field_name(node) do
case unwrap(node) do
{:type, _, :field_type, [name, _]} ->
name
node1 ->
record_type_field(data(node1), :name)
end
end
@spec record_type_field_type(syntaxTree()) :: syntaxTree()
def record_type_field_type(node) do
case unwrap(node) do
{:type, _, :field_type, [_, type]} ->
type
node1 ->
record_type_field(data(node1), :type)
end
end
@spec record_type_fields(syntaxTree()) :: [syntaxTree()]
def record_type_fields(node) do
case unwrap(node) do
{:type, _, :record, [_ | fields]} ->
fields
node1 ->
record_type(data(node1), :fields)
end
end
@spec record_type_name(syntaxTree()) :: syntaxTree()
def record_type_name(node) do
case unwrap(node) do
{:type, _, :record, [name | _]} ->
name
node1 ->
record_type(data(node1), :name)
end
end
@spec remove_comments(syntaxTree()) :: syntaxTree()
def remove_comments(node) do
case node do
tree(attr: attr) ->
tree(node, attr: attr(attr, com: :none))
wrapper(attr: attr) ->
wrapper(node, attr: attr(attr, com: :none))
_ ->
node
end
end
@spec revert(syntaxTree()) :: syntaxTree()
def revert(node) do
case is_tree(node) do
false ->
unwrap(node)
true ->
case is_leaf(node) do
true ->
revert_root(node)
false ->
gs = for l <- subtrees(node) do
for x <- l do
revert(x)
end
end
node1 = update_tree(node, gs)
revert_root(node1)
end
end
end
@spec revert_forms(forms()) :: [erl_parse()]
def revert_forms(forms) when is_list(forms), do: revert_forms(form_list(forms))
def revert_forms(t) do
case type(t) do
:form_list ->
t1 = flatten_form_list(t)
try do
{:ok, revert_forms_1(form_list_elements(t1))}
catch
error -> error
end
|> case do
{:ok, fs} ->
fs
{:error, _} = error ->
:erlang.error(error)
{:"EXIT", r} ->
exit(r)
r ->
throw(r)
end
_ ->
:erlang.error({:badarg, t})
end
end
@spec set_ann(syntaxTree(), [term()]) :: syntaxTree()
def set_ann(node, as) do
case node do
tree(attr: attr) ->
tree(node, attr: attr(attr, ann: as))
wrapper(attr: attr) ->
wrapper(node, attr: attr(attr, ann: as))
_ ->
set_ann(wrap(node), as)
end
end
@spec set_attrs(syntaxTree(), syntaxTreeAttributes()) :: syntaxTree()
def set_attrs(node, attr) do
case node do
tree() ->
tree(node, attr: attr)
wrapper() ->
wrapper(node, attr: attr)
_ ->
set_attrs(wrap(node), attr)
end
end
@spec set_pos(syntaxTree(), term()) :: syntaxTree()
def set_pos(node, pos) do
case node do
tree(attr: attr) ->
tree(node, attr: attr(attr, pos: pos))
wrapper(attr: attr) ->
wrapper(node, attr: attr(attr, pos: pos))
_ ->
set_pos(wrap(node), pos)
end
end
@spec set_postcomments(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def set_postcomments(node, cs) do
case node do
tree(attr: attr) ->
tree(node, attr: set_postcomments_1(attr, cs))
wrapper(attr: attr) ->
wrapper(node, attr: set_postcomments_1(attr, cs))
_ ->
set_postcomments(wrap(node), cs)
end
end
@spec set_precomments(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def set_precomments(node, cs) do
case node do
tree(attr: attr) ->
tree(node, attr: set_precomments_1(attr, cs))
wrapper(attr: attr) ->
wrapper(node, attr: set_precomments_1(attr, cs))
_ ->
set_precomments(wrap(node), cs)
end
end
@spec size_qualifier(syntaxTree(), syntaxTree()) :: syntaxTree()
def size_qualifier(body, size), do: tree(:size_qualifier, size_qualifier(body: body, size: size))
@spec size_qualifier_argument(syntaxTree()) :: syntaxTree()
def size_qualifier_argument(node), do: size_qualifier(data(node), :size)
@spec size_qualifier_body(syntaxTree()) :: syntaxTree()
def size_qualifier_body(node), do: size_qualifier(data(node), :body)
@spec string(charlist()) :: syntaxTree()
def string(string), do: tree(:string, string)
@spec string_literal(syntaxTree()) :: [char(), ...]
def string_literal(node), do: string_literal(node, :latin1)
@spec string_literal(syntaxTree(), encoding()) :: [char(), ...]
def string_literal(node, :utf8), do: :io_lib.write_string(string_value(node))
def string_literal(node, :unicode), do: :io_lib.write_string(string_value(node))
def string_literal(node, :latin1), do: :io_lib.write_string_as_latin1(string_value(node))
@spec string_value(syntaxTree()) :: charlist()
def string_value(node) do
case unwrap(node) do
{:string, _, list} ->
list
node1 ->
data(node1)
end
end
@spec subtrees(syntaxTree()) :: [[syntaxTree()]]
def subtrees(t), do: ...
@spec text(charlist()) :: syntaxTree()
def text(string), do: tree(:text, string)
@spec text_string(syntaxTree()) :: charlist()
def text_string(node), do: data(node)
@spec tree(atom()) :: tree()
def tree(type), do: tree(type, [])
@spec tree(atom(), term()) :: tree()
def tree(type, data), do: tree(type: type, data: data)
@spec try_after_expr([syntaxTree()], [syntaxTree()]) :: syntaxTree()
def try_after_expr(body, erlangVariableAfter), do: try_expr(body, [], [], erlangVariableAfter)
@spec try_expr([syntaxTree()], [syntaxTree()]) :: syntaxTree()
def try_expr(body, handlers), do: try_expr(body, [], handlers)
@spec try_expr([syntaxTree()], [syntaxTree()], [syntaxTree()]) :: syntaxTree()
def try_expr(body, clauses, handlers), do: try_expr(body, clauses, handlers, [])
@spec try_expr([syntaxTree()], [syntaxTree()], [syntaxTree()], [syntaxTree()]) :: syntaxTree()
def try_expr(body, clauses, handlers, erlangVariableAfter), do: tree(:try_expr, try_expr(body: body, clauses: clauses, handlers: handlers, after: erlangVariableAfter))
@spec try_expr_after(syntaxTree()) :: [syntaxTree()]
def try_expr_after(node) do
case unwrap(node) do
{:try, _, _, _, _, erlangVariableAfter} ->
erlangVariableAfter
node1 ->
try_expr(data(node1), :after)
end
end
@spec try_expr_body(syntaxTree()) :: [syntaxTree()]
def try_expr_body(node) do
case unwrap(node) do
{:try, _, body, _, _, _} ->
body
node1 ->
try_expr(data(node1), :body)
end
end
@spec try_expr_clauses(syntaxTree()) :: [syntaxTree()]
def try_expr_clauses(node) do
case unwrap(node) do
{:try, _, _, clauses, _, _} ->
clauses
node1 ->
try_expr(data(node1), :clauses)
end
end
@spec try_expr_handlers(syntaxTree()) :: [syntaxTree()]
def try_expr_handlers(node) do
case unwrap(node) do
{:try, _, _, _, handlers, _} ->
unfold_try_clauses(handlers)
node1 ->
try_expr(data(node1), :handlers)
end
end
@spec tuple([syntaxTree()]) :: syntaxTree()
def tuple(list), do: tree(:tuple, list)
@spec tuple_elements(syntaxTree()) :: [syntaxTree()]
def tuple_elements(node) do
case unwrap(node) do
{:tuple, _, list} ->
list
node1 ->
data(node1)
end
end
@spec tuple_size(syntaxTree()) :: non_neg_integer()
def tuple_size(node), do: length(tuple_elements(node))
def tuple_type(), do: tuple_type(:any_size)
@spec tuple_type((:any_size | [syntaxTree()])) :: syntaxTree()
def tuple_type(elements), do: tree(:tuple_type, elements)
@spec tuple_type_elements(syntaxTree()) :: (:any_size | [syntaxTree()])
def tuple_type_elements(node) do
case unwrap(node) do
{:type, _, :tuple, elements} when is_list(elements) ->
elements
{:type, _, :tuple, :any} ->
:any_size
node1 ->
data(node1)
end
end
@spec type(syntaxTree()) :: atom()
def type(tree(type: t)), do: t
def type(wrapper(type: t)), do: t
def type(node), do: ...
@spec type_application(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def type_application(typeName, arguments), do: tree(:type_application, type_application(type_name: typeName, arguments: arguments))
@spec type_application((:none | syntaxTree()), syntaxTree(), [syntaxTree()]) :: syntaxTree()
def type_application(:none, typeName, arguments), do: type_application(typeName, arguments)
def type_application(module, typeName, arguments), do: type_application(module_qualifier(module, typeName), arguments)
@spec type_application_arguments(syntaxTree()) :: [syntaxTree()]
def type_application_arguments(node) do
case unwrap(node) do
{:remote_type, _, [_, _, arguments]} ->
arguments
{:type, _, _, arguments} ->
arguments
node1 ->
type_application(data(node1), :arguments)
end
end
@spec type_application_name(syntaxTree()) :: syntaxTree()
def type_application_name(node) do
case unwrap(node) do
{:remote_type, _, [module, name, _]} ->
module_qualifier(module, name)
{:type, pos, name, _} ->
set_pos(atom(name), pos)
node1 ->
type_application(data(node1), :type_name)
end
end
@spec type_union([syntaxTree()]) :: syntaxTree()
def type_union(types), do: tree(:type_union, types)
@spec type_union_types(syntaxTree()) :: [syntaxTree()]
def type_union_types(node) do
case unwrap(node) do
{:type, _, :union, types} when is_list(types) ->
types
node1 ->
data(node1)
end
end
@spec typed_record_field(syntaxTree(), syntaxTree()) :: syntaxTree()
def typed_record_field(field, type), do: tree(:typed_record_field, typed_record_field(body: field, type: type))
@spec typed_record_field_body(syntaxTree()) :: syntaxTree()
def typed_record_field_body(node), do: typed_record_field(data(node), :body)
@spec typed_record_field_type(syntaxTree()) :: syntaxTree()
def typed_record_field_type(node), do: typed_record_field(data(node), :type)
@spec underscore() :: syntaxTree()
def underscore(), do: tree(:underscore, [])
@spec update_tree(syntaxTree(), [[syntaxTree()]]) :: syntaxTree()
def update_tree(node, groups), do: copy_attrs(node, make_tree(type(node), groups))
@spec user_type_application(syntaxTree(), [syntaxTree()]) :: syntaxTree()
def user_type_application(typeName, arguments), do: tree(:user_type_application, user_type_application(type_name: typeName, arguments: arguments))
@spec user_type_application_arguments(syntaxTree()) :: [syntaxTree()]
def user_type_application_arguments(node) do
case unwrap(node) do
{:user_type, _, _, arguments} ->
arguments
node1 ->
user_type_application(data(node1), :arguments)
end
end
@spec user_type_application_name(syntaxTree()) :: syntaxTree()
def user_type_application_name(node) do
case unwrap(node) do
{:user_type, pos, name, _} ->
set_pos(atom(name), pos)
node1 ->
user_type_application(data(node1), :type_name)
end
end
@spec variable((atom() | charlist())) :: syntaxTree()
def variable(name) when is_atom(name), do: tree(:variable, name)
def variable(name), do: tree(:variable, :erlang.list_to_atom(name))
@spec variable_literal(syntaxTree()) :: charlist()
def variable_literal(node) do
case unwrap(node) do
{:var, _, name} ->
:erlang.atom_to_list(name)
node1 ->
:erlang.atom_to_list(data(node1))
end
end
@spec variable_name(syntaxTree()) :: atom()
def variable_name(node) do
case unwrap(node) do
{:var, _, name} ->
name
node1 ->
data(node1)
end
end
@spec warning_marker(term()) :: syntaxTree()
def warning_marker(warning), do: tree(:warning_marker, warning)
@spec warning_marker_info(syntaxTree()) :: term()
def warning_marker_info(node) do
case unwrap(node) do
{:warning, error} ->
error
t ->
data(t)
end
end
# Private Functions
defp unquote(:"-abstract/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-abstract/1-lc$^1/1-1-")(p0) do
# body not decompiled
end
defp unquote(:"-concrete/1-fun-2-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-concrete/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-concrete/1-lc$^1/1-1-")(p0) do
# body not decompiled
end
defp unquote(:"-fold_binary_field_types/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-fold_function_names/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-fold_record_fields/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-fold_variable_names/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-is_literal/1-fun-0-")(p0) do
# body not decompiled
end
defp unquote(:"-is_literal/1-fun-1-")(p0) do
# body not decompiled
end
defp unquote(:"-is_literal/1-fun-2-")(p0) do
# body not decompiled
end
defp unquote(:"-meta_1/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-meta_subtrees/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-meta_subtrees/1-lc$^1/1-1-")(p0) do
# body not decompiled
end
defp unquote(:"-normalize_list_1/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-revert/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert/1-lc$^1/1-1-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_case_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_clause_disjunction/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_cond_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_fun_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_function/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_if_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_list/1-fun-0-")(p0, p1, p2) do
# body not decompiled
end
defp unquote(:"-revert_named_fun_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_receive_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_try_expr/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-revert_try_expr/1-lc$^1/1-1-")(p0) do
# body not decompiled
end
defp unquote(:"-unfold_binary_field_types/2-lc$^0/1-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-unfold_function_names/2-fun-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-unfold_function_names/2-lc$^1/1-0-")(p0, p1) do
# body not decompiled
end
defp unquote(:"-unfold_record_fields/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-unfold_try_clauses/1-lc$^0/1-0-")(p0) do
# body not decompiled
end
defp unquote(:"-unfold_variable_names/2-lc$^0/1-0-")(p0, p1) do
# body not decompiled
end
def abstract_list([t | ts]), do: [abstract(t) | abstract_list(ts)]
def abstract_list([]), do: []
def abstract_tail(h1, [h2 | t]), do: cons(abstract(h1), abstract_tail(h2, t))
def abstract_tail(h, t), do: cons(abstract(h), abstract(t))
def add_postcomments_1(cs, attr(com: :none) = attr), do: attr(attr, com: com(post: cs))
def add_postcomments_1(cs, attr(com: com) = attr), do: attr(attr, com: com(com, post: com(com, :post) ++ cs))
def add_precomments_1(cs, attr(com: :none) = attr), do: attr(attr, com: com(pre: cs))
def add_precomments_1(cs, attr(com: com) = attr), do: attr(attr, com: com(com, pre: com(com, :pre) ++ cs))
def concrete_list([e | es]), do: [concrete(e) | concrete_list(es)]
def concrete_list([]), do: []
def conjunction_list([l | ls]), do: [conjunction(l) | conjunction_list(ls)]
def conjunction_list([]), do: []
def cons_prefix({:cons, _, head, tail}), do: [head | cons_prefix(tail)]
def cons_prefix(_), do: []
def cons_suffix({:cons, _, _, tail}), do: cons_suffix(tail)
def cons_suffix(tail), do: tail
def flatten_form_list_1([f | fs], as) do
case type(f) do
:form_list ->
as1 = flatten_form_list_1(form_list_elements(f), as)
flatten_form_list_1(fs, as1)
_ ->
flatten_form_list_1(fs, [f | as])
end
end
def flatten_form_list_1([], as), do: as
def fold_binary_field_type(node) do
case type(node) do
:size_qualifier ->
{concrete(size_qualifier_body(node)), concrete(size_qualifier_argument(node))}
_ ->
concrete(node)
end
end
def fold_binary_field_types(ts) do
for t <- ts do
fold_binary_field_type(t)
end
end
def fold_function_name(n) do
name = arity_qualifier_body(n)
arity = arity_qualifier_argument(n)
true = type(name) === :atom and type(arity) === :integer
{concrete(name), concrete(arity)}
end
def fold_function_names(ns) do
for n <- ns do
fold_function_name(n)
end
end
def fold_record_field(f) do
case type(f) do
:typed_record_field ->
field = fold_record_field_1(typed_record_field_body(f))
type = typed_record_field_type(f)
{:typed_record_field, field, type}
:record_field ->
fold_record_field_1(f)
end
end
def fold_record_field_1(f) do
pos = get_pos(f)
name = record_field_name(f)
case record_field_value(f) do
:none ->
{:record_field, pos, name}
value ->
{:record_field, pos, name, value}
end
end
def fold_record_fields(fs) do
for f <- fs do
fold_record_field(f)
end
end
def fold_try_clause({:clause, pos, [p], guard, body}) do
p1 = case type(p) do
:class_qualifier ->
{:tuple, pos, [class_qualifier_argument(p), class_qualifier_body(p), {:var, pos, :_}]}
_ ->
{:tuple, pos, [{:atom, pos, :throw}, p, {:var, pos, :_}]}
end
{:clause, pos, [p1], guard, body}
end
def fold_variable_names(vs) do
for v <- vs do
variable_name(v)
end
end
def get_com(tree(attr: attr)), do: attr(attr, :com)
def get_com(wrapper(attr: attr)), do: attr(attr, :com)
def get_com(_), do: :none
def get_postcomments_1(attr(com: :none)), do: []
def get_postcomments_1(attr(com: com(post: cs))), do: cs
def get_precomments_1(attr(com: :none)), do: []
def get_precomments_1(attr(com: com(pre: cs))), do: cs
def is_literal_binary_field(f) do
case binary_field_types(f) do
[] ->
is_literal(binary_field_body(f))
_ ->
false
end
end
def is_literal_map_field(f) do
case type(f) do
:map_field_assoc ->
is_literal(map_field_assoc_name(f)) and is_literal(map_field_assoc_value(f))
:map_field_exact ->
false
end
end
def is_printable(s), do: :io_lib.printable_list(s)
def list_elements(node, as) do
case type(node) do
:list ->
as1 = :lists.reverse(list_prefix(node)) ++ as
case list_suffix(node) do
:none ->
as1
tail ->
list_elements(tail, as1)
end
nil ->
as
end
end
def list_length(node, a) do
case type(node) do
:list ->
a1 = length(list_prefix(node)) + a
case list_suffix(node) do
:none ->
a1
tail ->
list_length(tail, a1)
end
nil ->
a
end
end
def make_float(value), do: tree(:float, value)
def meta_0(t), do: meta_1(remove_comments(t))
def meta_1(t), do: ...
def meta_call(f, as), do: application(atom(:erl_syntax), atom(f), as)
def meta_list([t | ts]), do: [meta(t) | meta_list(ts)]
def meta_list([]), do: []
def meta_postcomment(t) do
case get_postcomments(t) do
[] ->
meta_0(t)
cs ->
meta_call(:set_postcomments, [meta_0(t), list(meta_list(cs))])
end
end
def meta_precomment(t) do
case get_precomments(t) do
[] ->
meta_postcomment(t)
cs ->
meta_call(:set_precomments, [meta_postcomment(t), list(meta_list(cs))])
end
end
def meta_subtrees(gs) do
list((for g <- gs do
list((for t <- g do
meta(t)
end))
end))
end
def normalize_list_1(es, tail) do
:lists.foldr(fn x, a ->
list([x], a)
end, tail, es)
end
def revert_annotated_type(node) do
pos = get_pos(node)
name = annotated_type_name(node)
type = annotated_type_body(node)
{:ann_type, pos, [name, type]}
end
def revert_application(node) do
pos = get_pos(node)
operator = application_operator(node)
arguments = application_arguments(node)
{:call, pos, operator, arguments}
end
def revert_atom(node) do
pos = get_pos(node)
{:atom, pos, atom_value(node)}
end
def revert_attribute(node) do
name = attribute_name(node)
args = attribute_arguments(node)
pos = get_pos(node)
case type(name) do
:atom ->
revert_attribute_1(atom_value(name), args, pos, node)
_ ->
node
end
end
def revert_attribute_1(:module, [m], pos, node) do
case revert_module_name(m) do
{:ok, a} ->
{:attribute, pos, :module, a}
:error ->
node
end
end
def revert_attribute_1(:module, [m, list], pos, node) do
vs = case is_list_skeleton(list) do
true ->
case is_proper_list(list) do
true ->
fold_variable_names(list_elements(list))
false ->
node
end
false ->
node
end
case revert_module_name(m) do
{:ok, a} ->
{:attribute, pos, :module, {a, vs}}
:error ->
node
end
end
def revert_attribute_1(:export, [list], pos, node) do
case is_list_skeleton(list) do
true ->
case is_proper_list(list) do
true ->
fs = fold_function_names(list_elements(list))
{:attribute, pos, :export, fs}
false ->
node
end
false ->
node
end
end
def revert_attribute_1(:import, [m], pos, node) do
case revert_module_name(m) do
{:ok, a} ->
{:attribute, pos, :import, a}
:error ->
node
end
end
def revert_attribute_1(:import, [m, list], pos, node) do
case revert_module_name(m) do
{:ok, a} ->
case is_list_skeleton(list) do
true ->
case is_proper_list(list) do
true ->
fs = fold_function_names(list_elements(list))
{:attribute, pos, :import, {a, fs}}
false ->
node
end
false ->
node
end
:error ->
node
end
end
def revert_attribute_1(:file, [a, line], pos, node) do
case type(a) do
:string ->
case type(line) do
:integer ->
{:attribute, pos, :file, {concrete(a), concrete(line)}}
_ ->
node
end
_ ->
node
end
end
def revert_attribute_1(:record, [a, tuple], pos, node) do
case type(a) do
:atom ->
case type(tuple) do
:tuple ->
fs = fold_record_fields(tuple_elements(tuple))
{:attribute, pos, :record, {concrete(a), fs}}
_ ->
node
end
_ ->
node
end
end
def revert_attribute_1(n, [t], pos, _), do: {:attribute, pos, n, concrete(t)}
def revert_attribute_1(_, _, _, node), do: node
def revert_binary(node) do
pos = get_pos(node)
{:bin, pos, binary_fields(node)}
end
def revert_binary_comp(node) do
pos = get_pos(node)
template = binary_comp_template(node)
body = binary_comp_body(node)
{:bc, pos, template, body}
end
def revert_binary_field(node) do
pos = get_pos(node)
body = binary_field_body(node)
{expr, size} = case type(body) do
:size_qualifier ->
{size_qualifier_body(body), size_qualifier_argument(body)}
_ ->
{body, :default}
end
types = case binary_field_types(node) do
[] ->
:default
ts ->
fold_binary_field_types(ts)
end
{:bin_element, pos, expr, size, types}
end
def revert_binary_generator(node) do
pos = get_pos(node)
pattern = binary_generator_pattern(node)
body = binary_generator_body(node)
{:b_generate, pos, pattern, body}
end
def revert_bitstring_type(node) do
pos = get_pos(node)
m = bitstring_type_m(node)
n = bitstring_type_n(node)
{:type, pos, :binary, [m, n]}
end
def revert_block_expr(node) do
pos = get_pos(node)
body = block_expr_body(node)
{:block, pos, body}
end
def revert_case_expr(node) do
pos = get_pos(node)
argument = case_expr_argument(node)
clauses = for c <- case_expr_clauses(node) do
revert_clause(c)
end
{:case, pos, argument, clauses}
end
def revert_catch_expr(node) do
pos = get_pos(node)
expr = catch_expr_body(node)
{:catch, pos, expr}
end
def revert_char(node) do
pos = get_pos(node)
{:char, pos, char_value(node)}
end
def revert_clause(node) do
pos = get_pos(node)
guard = case clause_guard(node) do
:none ->
[]
e ->
case type(e) do
:disjunction ->
revert_clause_disjunction(e)
:conjunction ->
[conjunction_body(e)]
_ ->
[[e]]
end
end
{:clause, pos, clause_patterns(node), guard, clause_body(node)}
end
def revert_clause_disjunction(d) do
for e <- disjunction_body(d) do
case type(e) do
:conjunction ->
conjunction_body(e)
_ ->
[e]
end
end
end
def revert_cond_expr(node) do
pos = get_pos(node)
clauses = for c <- cond_expr_clauses(node) do
revert_clause(c)
end
{:cond, pos, clauses}
end
def revert_constrained_function_type(node) do
pos = get_pos(node)
functionType = constrained_function_type_body(node)
functionConstraint = conjunction_body(constrained_function_type_argument(node))
{:type, pos, :bounded_fun, [functionType, functionConstraint]}
end
def revert_constraint(node) do
pos = get_pos(node)
name = constraint_argument(node)
types = constraint_body(node)
{:type, pos, :constraint, [name, types]}
end
def revert_eof_marker(node) do
pos = get_pos(node)
{:eof, pos}
end
def revert_error_marker(node), do: {:error, error_marker_info(node)}
def revert_float(node) do
pos = get_pos(node)
{:float, pos, float_value(node)}
end
def revert_forms_1([t | ts]) do
case type(t) do
:comment ->
revert_forms_1(ts)
_ ->
t1 = revert(t)
case is_tree(t1) do
true ->
throw({:error, t1})
false ->
[t1 | revert_forms_1(ts)]
end
end
end
def revert_forms_1([]), do: []
def revert_fun_expr(node) do
clauses = for c <- fun_expr_clauses(node) do
revert_clause(c)
end
pos = get_pos(node)
{:fun, pos, {:clauses, clauses}}
end
def revert_fun_type(node) do
pos = get_pos(node)
{:type, pos, :fun, []}
end
def revert_function(node) do
name = function_name(node)
clauses = for c <- function_clauses(node) do
revert_clause(c)
end
pos = get_pos(node)
case type(name) do
:atom ->
a = function_arity(node)
{:function, pos, concrete(name), a, clauses}
_ ->
node
end
end
def revert_function_type(node) do
pos = get_pos(node)
type = function_type_return(node)
case function_type_arguments(node) do
:any_arity ->
{:type, pos, :fun, [{:type, pos, :any}, type]}
arguments ->
{:type, pos, :fun, [{:type, pos, :product, arguments}, type]}
end
end
def revert_generator(node) do
pos = get_pos(node)
pattern = generator_pattern(node)
body = generator_body(node)
{:generate, pos, pattern, body}
end
def revert_if_expr(node) do
pos = get_pos(node)
clauses = for c <- if_expr_clauses(node) do
revert_clause(c)
end
{:if, pos, clauses}
end
def revert_implicit_fun(node) do
pos = get_pos(node)
name = implicit_fun_name(node)
case type(name) do
:arity_qualifier ->
f = arity_qualifier_body(name)
a = arity_qualifier_argument(name)
case {type(f), type(a)} do
{:atom, :integer} ->
{:fun, pos, {:function, concrete(f), concrete(a)}}
_ ->
node
end
:module_qualifier ->
m = module_qualifier_argument(name)
name1 = module_qualifier_body(name)
case type(name1) do
:arity_qualifier ->
f = arity_qualifier_body(name1)
a = arity_qualifier_argument(name1)
{:fun, pos, {:function, m, f, a}}
_ ->
node
end
_ ->
node
end
end
def revert_infix_expr(node) do
pos = get_pos(node)
operator = infix_expr_operator(node)
left = infix_expr_left(node)
right = infix_expr_right(node)
case type(operator) do
:operator ->
{:op, pos, operator_name(operator), left, right}
_ ->
node
end
end
def revert_integer(node) do
pos = get_pos(node)
{:integer, pos, integer_value(node)}
end
def revert_integer_range_type(node) do
pos = get_pos(node)
low = integer_range_type_low(node)
high = integer_range_type_high(node)
{:type, pos, :range, [low, high]}
end
def revert_list(node) do
pos = get_pos(node)
p = list_prefix(node)
s = case list_suffix(node) do
:none ->
revert_nil(set_pos(apply(__MODULE__, nil, []), pos))
s1 ->
s1
end
:lists.foldr(fn x, a ->
{:cons, pos, x, a}
end, s, p)
end
def revert_list_comp(node) do
pos = get_pos(node)
template = list_comp_template(node)
body = list_comp_body(node)
{:lc, pos, template, body}
end
def revert_map_expr(node) do
pos = get_pos(node)
argument = map_expr_argument(node)
fields = map_expr_fields(node)
case argument do
:none ->
{:map, pos, fields}
_ ->
{:map, pos, argument, fields}
end
end
def revert_map_field_assoc(node) do
pos = get_pos(node)
name = map_field_assoc_name(node)
value = map_field_assoc_value(node)
{:map_field_assoc, pos, name, value}
end
def revert_map_field_exact(node) do
pos = get_pos(node)
name = map_field_exact_name(node)
value = map_field_exact_value(node)
{:map_field_exact, pos, name, value}
end
def revert_map_type(node) do
pos = get_pos(node)
{:type, pos, :map, map_type_fields(node)}
end
def revert_map_type_assoc(node) do
pos = get_pos(node)
name = map_type_assoc_name(node)
value = map_type_assoc_value(node)
{:type, pos, :map_field_assoc, [name, value]}
end
def revert_map_type_exact(node) do
pos = get_pos(node)
name = map_type_exact_name(node)
value = map_type_exact_value(node)
{:type, pos, :map_field_exact, [name, value]}
end
def revert_match_expr(node) do
pos = get_pos(node)
pattern = match_expr_pattern(node)
body = match_expr_body(node)
{:match, pos, pattern, body}
end
def revert_module_name(a) do
case type(a) do
:atom ->
{:ok, concrete(a)}
_ ->
:error
end
end
def revert_module_qualifier(node) do
pos = get_pos(node)
module = module_qualifier_argument(node)
body = module_qualifier_body(node)
{:remote, pos, module, body}
end
def revert_named_fun_expr(node) do
pos = get_pos(node)
name = named_fun_expr_name(node)
clauses = for c <- named_fun_expr_clauses(node) do
revert_clause(c)
end
case type(name) do
:variable ->
{:named_fun, pos, variable_name(name), clauses}
_ ->
node
end
end
def revert_nil(node) do
pos = get_pos(node)
{nil, pos}
end
def revert_parentheses(node), do: parentheses_body(node)
def revert_prefix_expr(node) do
pos = get_pos(node)
operator = prefix_expr_operator(node)
argument = prefix_expr_argument(node)
case type(operator) do
:operator ->
{:op, pos, operator_name(operator), argument}
_ ->
node
end
end
def revert_receive_expr(node) do
pos = get_pos(node)
clauses = for c <- receive_expr_clauses(node) do
revert_clause(c)
end
timeout = receive_expr_timeout(node)
action = receive_expr_action(node)
case timeout do
:none ->
{:receive, pos, clauses}
_ ->
{:receive, pos, clauses, timeout, action}
end
end
def revert_record_access(node) do
pos = get_pos(node)
argument = record_access_argument(node)
type = record_access_type(node)
field = record_access_field(node)
case type(type) do
:atom ->
{:record_field, pos, argument, concrete(type), field}
_ ->
node
end
end
def revert_record_expr(node) do
pos = get_pos(node)
argument = record_expr_argument(node)
type = record_expr_type(node)
fields = record_expr_fields(node)
case type(type) do
:atom ->
t = concrete(type)
fs = fold_record_fields(fields)
case argument do
:none ->
{:record, pos, t, fs}
_ ->
{:record, pos, argument, t, fs}
end
_ ->
node
end
end
def revert_record_index_expr(node) do
pos = get_pos(node)
type = record_index_expr_type(node)
field = record_index_expr_field(node)
case type(type) do
:atom ->
{:record_index, pos, concrete(type), field}
_ ->
node
end
end
def revert_record_type(node) do
pos = get_pos(node)
name = record_type_name(node)
fields = record_type_fields(node)
{:type, pos, :record, [name | fields]}
end
def revert_record_type_field(node) do
pos = get_pos(node)
name = record_type_field_name(node)
type = record_type_field_type(node)
{:type, pos, :field_type, [name, type]}
end
def revert_root(node), do: ...
def revert_string(node) do
pos = get_pos(node)
{:string, pos, string_value(node)}
end
def revert_try_clause(node), do: fold_try_clause(revert_clause(node))
def revert_try_expr(node) do
pos = get_pos(node)
body = try_expr_body(node)
clauses = for c <- try_expr_clauses(node) do
revert_clause(c)
end
handlers = for c <- try_expr_handlers(node) do
revert_try_clause(c)
end
erlangVariableAfter = try_expr_after(node)
{:try, pos, body, clauses, handlers, erlangVariableAfter}
end
def revert_tuple(node) do
pos = get_pos(node)
{:tuple, pos, tuple_elements(node)}
end
def revert_tuple_type(node) do
pos = get_pos(node)
{:type, pos, :tuple, tuple_type_elements(node)}
end
def revert_type_application(node) do
pos = get_pos(node)
typeName = type_application_name(node)
arguments = type_application_arguments(node)
case type(typeName) do
:module_qualifier ->
module = module_qualifier_argument(typeName)
name = module_qualifier_body(typeName)
{:remote_type, pos, [module, name, arguments]}
:atom ->
{:type, pos, atom_value(typeName), arguments}
end
end
def revert_type_union(node) do
pos = get_pos(node)
{:type, pos, :union, type_union_types(node)}
end
def revert_underscore(node) do
pos = get_pos(node)
{:var, pos, :_}
end
def revert_user_type_application(node) do
pos = get_pos(node)
typeName = user_type_application_name(node)
arguments = user_type_application_arguments(node)
{:user_type, pos, atom_value(typeName), arguments}
end
def revert_variable(node) do
pos = get_pos(node)
name = variable_name(node)
{:var, pos, name}
end
def revert_warning_marker(node), do: {:warning, warning_marker_info(node)}
def set_com(node, com) do
case node do
tree(attr: attr) ->
tree(node, attr: attr(attr, com: com))
wrapper(attr: attr) ->
wrapper(node, attr: attr(attr, com: com))
_ ->
set_com(wrap(node), com)
end
end
def set_postcomments_1(attr(com: :none) = attr, cs), do: attr(attr, com: com(post: cs))
def set_postcomments_1(attr(com: com) = attr, cs), do: attr(attr, com: com(com, post: cs))
def set_precomments_1(attr(com: :none) = attr, cs), do: attr(attr, com: com(pre: cs))
def set_precomments_1(attr(com: com) = attr, cs), do: attr(attr, com: com(com, pre: cs))
def unfold_binary_field_type({type, size}, pos), do: set_pos(size_qualifier(atom(type), integer(size)), pos)
def unfold_binary_field_type(type, pos), do: set_pos(atom(type), pos)
def unfold_binary_field_types(ts, pos) do
for t <- ts do
unfold_binary_field_type(t, pos)
end
end
def unfold_function_names(ns, pos) do
f = fn {atom, arity} ->
n = arity_qualifier(atom(atom), integer(arity))
set_pos(n, pos)
end
for n <- ns do
f.(n)
end
end
def unfold_record_field({:typed_record_field, field, type}) do
f = unfold_record_field_1(field)
set_pos(typed_record_field(f, type), get_pos(f))
end
def unfold_record_field(field), do: unfold_record_field_1(field)
def unfold_record_field_1({:record_field, pos, name}), do: set_pos(record_field(name), pos)
def unfold_record_field_1({:record_field, pos, name, value}), do: set_pos(record_field(name, value), pos)
def unfold_record_fields(fs) do
for f <- fs do
unfold_record_field(f)
end
end
def unfold_try_clause({:clause, pos, [{:tuple, _, [{:atom, _, :throw}, v, _]}], guard, body}), do: {:clause, pos, [v], guard, body}
def unfold_try_clause({:clause, pos, [{:tuple, _, [c, v, _]}], guard, body}), do: {:clause, pos, [class_qualifier(c, v)], guard, body}
def unfold_try_clauses(cs) do
for c <- cs do
unfold_try_clause(c)
end
end
def unfold_variable_names(vs, pos) do
for v <- vs do
set_pos(variable(v), pos)
end
end
@spec unwrap(syntaxTree()) :: (tree() | erl_parse())
def unwrap(wrapper(tree: node)), do: node
def unwrap(node), do: node
@spec wrap(erl_parse()) :: wrapper()
def wrap(node), do: wrapper(type: type(node), attr: attr(pos: get_pos(node)), tree: node)
end
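# A brief usage sketch, hedged: the decompiled module above mirrors Erlang's
# :erl_syntax API, so the round trip is shown against the Erlang module
# (the decompiled source itself is not directly compilable).
node = :erl_syntax.tuple([:erl_syntax.atom(:ok), :erl_syntax.integer(42)])
:erl_syntax.type(node)   # => :tuple
:erl_syntax.revert(node) # => {:tuple, 0, [{:atom, 0, :ok}, {:integer, 0, 42}]}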
# Source: testData/org/elixir_lang/beam/decompiler/erl_syntax.ex
import Kernel, except: [round: 1]
defmodule Float do
@moduledoc """
Functions for working with floating point numbers.
"""
@doc """
Parses a binary into a float.
If successful, returns a tuple of the form `{float, remainder_of_binary}`.
Otherwise `:error`.
If you want to convert a string containing a formatted float
directly to a float, `String.to_float/1` can be used instead.
## Examples
iex> Float.parse("34")
{34.0, ""}
iex> Float.parse("34.25")
{34.25, ""}
iex> Float.parse("56.5xyz")
{56.5, "xyz"}
iex> Float.parse("pi")
:error
"""
@spec parse(binary) :: {float, binary} | :error
def parse("-" <> binary) do
case parse_unsign(binary) do
:error -> :error
{number, remainder} -> {-number, remainder}
end
end
def parse(binary) do
parse_unsign(binary)
end
defp parse_unsign("-" <> _), do: :error
defp parse_unsign(binary) when is_binary(binary) do
case Integer.parse binary do
:error -> :error
{integer_part, after_integer} -> parse_unsign after_integer, integer_part
end
end
# Dot followed by digit is required afterwards or we are done
defp parse_unsign(<< ?., char, rest :: binary >>, int) when char in ?0..?9 do
parse_unsign(rest, char - ?0, 1, int)
end
defp parse_unsign(rest, int) do
{:erlang.float(int), rest}
end
# Handle decimal points
defp parse_unsign(<< char, rest :: binary >>, float, decimal, int) when char in ?0..?9 do
parse_unsign rest, 10 * float + (char - ?0), decimal + 1, int
end
defp parse_unsign(<< ?e, after_e :: binary >>, float, decimal, int) do
case Integer.parse after_e do
:error ->
# Note we rebuild the binary here instead of breaking it apart at
# the function clause because the current approach copies a binary
# just on this branch. If we broke it apart in the function clause,
# the copy would happen when calling Integer.parse/1.
{floatify(int, float, decimal), << ?e, after_e :: binary >>}
{exponential, after_exponential} ->
{floatify(int, float, decimal, exponential), after_exponential}
end
end
defp parse_unsign(bitstring, float, decimal, int) do
{floatify(int, float, decimal), bitstring}
end
defp floatify(int, float, decimal, exponential \\ 0) do
multiplier = if int < 0, do: -1.0, else: 1.0
# Try to minimize rounding errors
result = multiplier * (abs(int) * :math.pow(10, decimal) + float) * :math.pow(10, exponential - decimal)
# Try to avoid results like this:
# iex(1)> 0.0001 * 75
# 0.007500000000000001
# which stem from the IEEE 754 floating point standard
# http://docs.oracle.com/cd/E19957-01/806-3568/ncg_goldberg.html
final_decimal_places = decimal - exponential
if final_decimal_places > 0 do
decimal_power_round = :math.pow(10, final_decimal_places)
multiplied_result = result * decimal_power_round
epsilon = :math.pow(10, -final_decimal_places) * 5
closest_approximation = ceil_within_error_range(multiplied_result, epsilon)
trunc(closest_approximation) / decimal_power_round
else
result
end
end
defp ceil_within_error_range(result, epsilon) do
ceiled = ceil(result)
if ceiled - result <= epsilon do
ceiled
else
result
end
end
@doc """
Rounds a float to the largest integer less than or equal to `number`.
Floor also accepts a precision to round a floating point value down
to an arbitrary number of fractional digits (between 0 and 15).
This function always returns floats. One may use `Kernel.trunc/1` to
truncate the result to an integer afterwards.
## Examples
iex> Float.floor(34.25)
34.0
iex> Float.floor(-56.5)
-57.0
iex> Float.floor(34.253, 2)
34.25
"""
@spec floor(float, 0..15) :: float
def floor(number, precision \\ 0) when is_float(number) and precision in 0..15 do
power = power_of_10(precision)
number = number * power
truncated = trunc(number)
variance = if number - truncated < 0, do: -1.0, else: 0.0
(truncated + variance) / power
end
@doc """
Rounds a float to the smallest integer greater than or equal to `number`.
Ceil also accepts a precision to round a floating point value up to
an arbitrary number of fractional digits (between 0 and 15).
This function always returns floats. One may use `Kernel.trunc/1` to
truncate the result to an integer afterwards.
## Examples
iex> Float.ceil(34.25)
35.0
iex> Float.ceil(-56.5)
-56.0
iex> Float.ceil(34.253, 2)
34.26
"""
@spec ceil(float, 0..15) :: float
def ceil(number, precision \\ 0) when is_float(number) and precision in 0..15 do
power = power_of_10(precision)
number = number * power
truncated = trunc(number)
variance = if number - truncated > 0, do: 1.0, else: 0.0
(truncated + variance) / power
end
@doc """
Rounds a floating point value to an arbitrary number of fractional digits
(between 0 and 15).
This function only accepts floats and returns floats. Use `Kernel.round/1`
if you want a function that accepts both floats and integers and always
returns an integer.
## Examples
iex> Float.round(5.5674, 3)
5.567
iex> Float.round(5.5675, 3)
5.568
iex> Float.round(-5.5674, 3)
-5.567
iex> Float.round(-5.5675, 3)
-5.568
"""
@spec round(float, 0..15) :: float
def round(number, precision \\ 0) when is_float(number) and precision in 0..15 do
power = power_of_10(precision)
Kernel.round(number * power) / power
end
Enum.reduce 0..15, 1, fn x, acc ->
defp power_of_10(unquote(x)), do: unquote(acc)
acc * 10
end
@doc """
Returns a char list which corresponds to the text representation of the given float.
Inlined by the compiler.
## Examples
iex> Float.to_char_list(7.0)
'7.00000000000000000000e+00'
"""
@spec to_char_list(float) :: char_list
def to_char_list(number) do
:erlang.float_to_list(number)
end
@doc """
Returns a list which corresponds to the text representation
of `float`.
## Options
* `:decimals` — number of decimal points to show
* `:scientific` — number of decimal points to show, in scientific format
* `:compact` — when true, use the most compact representation (ignored
with the `scientific` option)
## Examples
iex> Float.to_char_list 7.1, [decimals: 2, compact: true]
'7.1'
"""
@spec to_char_list(float, list) :: char_list
def to_char_list(float, options) do
:erlang.float_to_list(float, expand_compact(options))
end
@doc """
Returns a binary which corresponds to the text representation
of `some_float`.
Inlined by the compiler.
## Examples
iex> Float.to_string(7.0)
"7.00000000000000000000e+00"
"""
@spec to_string(float) :: String.t
def to_string(some_float) do
:erlang.float_to_binary(some_float)
end
@doc """
Returns a binary which corresponds to the text representation
of `float`.
## Options
* `:decimals` — number of decimal points to show
* `:scientific` — number of decimal points to show, in scientific format
* `:compact` — when true, use the most compact representation (ignored
with the `scientific` option)
## Examples
iex> Float.to_string 7.1, [decimals: 2, compact: true]
"7.1"
"""
@spec to_string(float, list) :: String.t
def to_string(float, options) do
:erlang.float_to_binary(float, expand_compact(options))
end
defp expand_compact([{:compact, false}|t]), do: expand_compact(t)
defp expand_compact([{:compact, true}|t]), do: [:compact|expand_compact(t)]
defp expand_compact([h|t]), do: [h|expand_compact(t)]
defp expand_compact([]), do: []
end
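# A small, hedged illustration of the drift floatify/4 guards against when
# scaling by powers of ten (the first result is the one cited in the comments
# above; the parse result assumes the compensation works as intended):
#
#     iex> 0.0001 * 75
#     0.007500000000000001
#     iex> Float.parse("0.0075")
#     {0.0075, ""}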
# Source: lib/elixir/lib/float.ex
defmodule Twirp.Telemetry do
@moduledoc """
Provides telemetry for twirp clients and servers
Twirp executes the following events:
* `[:twirp, :rpc, :start]` - Executed before making an rpc call to another service.
#### Measurements
* `:system_time` - The system time
#### Metadata
* `:client` - The client module issuing the call.
* `:method` - The RPC method
* `:service` - The url for the service
* `[:twirp, :rpc, :stop]` - Executed after an rpc call to another service completes.
#### Measurements
* `:duration` - Duration to send an rpc to a service and wait for a response.
#### Metadata
* `:client` - The client module issuing the call.
* `:method` - The RPC method
* `:service` - The url for the service
* `:error` - Optional key. If the call resulted in an error this key will be present along with the Twirp Error.
* `[:twirp, :call, :start]` - Executed before the twirp handler is called
#### Measurements
* `:system_time` - The system time
#### Metadata
There is no metadata for this event.
* `[:twirp, :call, :stop]` - Executed after twirp handler has been executed.
#### Measurements
* `:duration` - Duration to handle the rpc call.
#### Metadata
* `:content_type` - The content type being used, either proto or json.
* `:method` - The name of the method being executed.
* `:error` - Optional key. If the call resulted in an error this key will be present along with the Twirp Error.
* `[:twirp, :call, :exception]` - Executed if the twirp handler raises an exception
#### Measurements
* `:duration` - Duration to handle the rpc call.
#### Metadata
* `:kind` - The kind of error that was raised.
* `:error` - The exception
* `:stacktrace` - The stacktrace
"""
@doc false
def start(event, meta \\ %{}, extra_measurements \\ %{}) do
start_time = System.monotonic_time()
:telemetry.execute(
[:twirp, event, :start],
Map.merge(extra_measurements, %{system_time: System.system_time()}),
meta
)
start_time
end
@doc false
def stop(event, start_time, meta \\ %{}, extra_measurements \\ %{}) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
:telemetry.execute(
[:twirp, event, :stop],
measurements,
meta
)
end
@doc false
def exception(event, start_time, kind, reason, stack, meta \\ %{}, extra_measurements \\ %{}) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
meta =
meta
|> Map.put(:kind, kind)
|> Map.put(:error, reason)
|> Map.put(:stacktrace, stack)
:telemetry.execute([:twirp, event, :exception], measurements, meta)
end
@doc false
def event(event, measurements, meta) do
:telemetry.execute([:twirp, event], measurements, meta)
end
end
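# A minimal sketch of consuming the events documented above, assuming the
# standard :telemetry API (:telemetry.attach/4); the module and handler id
# names are illustrative, not part of Twirp.
defmodule MyApp.TwirpMetrics do
  require Logger

  def setup do
    :telemetry.attach(
      "my-app-twirp-rpc-stop",
      [:twirp, :rpc, :stop],
      &__MODULE__.handle_event/4,
      nil
    )
  end

  def handle_event([:twirp, :rpc, :stop], %{duration: duration}, meta, _config) do
    # Duration is reported in native time units; convert before logging.
    ms = System.convert_time_unit(duration, :native, :millisecond)
    Logger.info("twirp rpc #{inspect(meta[:method])} took #{ms}ms")
  end
end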
# Source: lib/twirp/telemetry.ex
## matched at unqualified no parentheses call
@one @two 3 do
end
## matched dot call
@one two . () do
end
## matched qualified no arguments call
@one Two . three do
end
## matched qualified no parentheses call
@one Two . three 4 do
end
## matched qualified parentheses call
@one Two . three() do
end
## matched unqualified no arguments call
@one two do
end
## matched unqualified no parentheses call
@one two 3 do
end
## matched unqualified parentheses call
@one two() do
end
# Unmatched dot calls
## matched at unqualified no parentheses call
one . (@two 3) do
end
## matched dot call
one . (two . ()) do
end
## matched qualified no arguments call
one . (Two . three) do
end
## matched qualified no parentheses call
one . (Two . three 4) do
end
## matched qualified parentheses call
one . (Two . three()) do
end
## matched unqualified no arguments call
one . (two) do
end
## matched unqualified no parentheses call
one . (two 3) do
end
## matched unqualified parentheses call
one . (two()) do
end
# Unmatched Qualified No Arguments Call
One . two do
end
# Unmatched Qualified No Parentheses Calls
## matched at unqualified no parentheses call
One . two @three 4 do
end
## matched dot call
One . two three . () do
end
## matched qualified no arguments call
One . two Three . four do
end
## matched qualified no parentheses call
One . two Three . four 5 do
end
## matched qualified parentheses call
One . two Three . four() do
end
## matched unqualified no arguments call
One . two three do
end
## matched unqualified no parentheses call
One . two three 4 do
end
## matched unqualified parentheses call
One . two three() do
end
# Unmatched Qualified Parentheses Calls
## matched at unqualified no parentheses call
One . two(@three 4) do
end
## matched dot call
One . two(three . ()) do
end
## matched qualified no arguments call
One . two(Three . four) do
end
## matched qualified no parentheses call
One . two(Three . four 5) do
end
## matched qualified parentheses call
One . two(Three . four()) do
end
## matched unqualified no arguments call
One . two(three) do
end
## matched unqualified no parentheses call
One . two(three 4) do
end
## matched unqualified parentheses call
One . two(three()) do
end
# Source: testData/org/elixir_lang/formatting/incorrect_spaces_around_dot_operator.ex
defmodule StepFlow.WorkflowDefinitionView do
use StepFlow, :view
alias StepFlow.WorkflowDefinitionView
def render("index.json", %{workflow_definitions: %{data: workflow_definitions, total: total}}) do
%{
data: render_many(workflow_definitions, WorkflowDefinitionView, "workflow_definition.json"),
total: total
}
end
def render("show.json", %{workflow_definition: workflow_definition}) do
%{
data:
render_one(
workflow_definition,
WorkflowDefinitionView,
"workflow_definition_with_steps.json"
)
}
end
def render("workflow_definition.json", %{workflow_definition: workflow_definition}) do
%{
schema_version: workflow_definition.schema_version,
id: workflow_definition.id,
identifier: workflow_definition.identifier,
label: workflow_definition.label,
icon: workflow_definition.icon,
is_live: workflow_definition.is_live,
version_major: workflow_definition.version_major,
version_minor: workflow_definition.version_minor,
version_micro: workflow_definition.version_micro,
tags: workflow_definition.tags,
start_parameters: workflow_definition.start_parameters,
parameters: workflow_definition.parameters
}
end
def render("workflow_definition_with_steps.json", %{workflow_definition: workflow_definition}) do
%{
id: workflow_definition.id,
identifier: workflow_definition.identifier,
label: workflow_definition.label,
icon: workflow_definition.icon,
version_major: workflow_definition.version_major,
version_minor: workflow_definition.version_minor,
version_micro: workflow_definition.version_micro,
tags: workflow_definition.tags,
start_parameters: workflow_definition.start_parameters,
parameters: workflow_definition.parameters,
steps: workflow_definition.steps
}
end
def render("error.json", %{errors: errors}) do
%{
errors: [
%{
reason: errors.reason,
message: Map.get(errors, :message, "Incorrect parameters")
}
]
}
end
end
# Source: lib/step_flow/view/workflow_definition_view.ex
defmodule ExState.Ecto.Query do
@moduledoc """
`ExState.Ecto.Query` provides functions for querying workflow state in the
database through Ecto.
"""
import Ecto.Query
@doc """
where_state/2 takes a subject query and a state, and filters for workflows that are in the
exact state passed. Nested states can be passed as a list of states and will be converted
to a valid state identifier in the query.
Pass the state as the keyword list `not: state` in order to query the inverse.
Examples:
investment #=> %Investment{workflow: %Workflow{state: "subscribing.confirming_options"}}
Investment
|> where_state("subscribing.confirming_options")
|> Repo.all() #=> [investment]
Investment
|> where_state([:subscribing, :confirming_options])
|> Repo.all() #=> [investment]
Investment
|> where_state(["subscribing", "confirming_options"])
|> Repo.all() #=> [investment]
Investment
|> where_state(:subscribing)
|> Repo.all() #=> []
Investment
|> where_state(not: [:subscribing, :confirming_suitability])
|> Repo.all() #=> [investment]
Investment
|> where_state(not: [:subscribing, :confirming_options])
|> Repo.all() #=> []
"""
def where_state(subject_query, not: state) do
subject_query
|> join_workflow_maybe()
|> where([workflow: workflow], workflow.state != ^to_state_id(state))
end
def where_state(subject_query, state) do
subject_query
|> join_workflow_maybe()
|> where([workflow: workflow], workflow.state == ^to_state_id(state))
end
@doc """
where_state_in/2 takes a subject query and a list of states and filters based on workflows that
are in one of the exact states that are passed. Nested states can be passed as a list of states
and will be converted to a valid state identifier in the query.
Pass the state as the keyword list `not: state` in order to query the inverse.
Examples:
investment1 #=> %Investment{workflow: %Workflow{state: "subscribing.confirming_options"}}
investment2 #=> %Investment{workflow: %Workflow{state: "executed"}}
Investment
|> where_state_in([
[:subscribing, :confirming_options],
:executed
])
|> Repo.all() #=> [investment1, investment2]
Investment
|> where_state_in(["subscribing.confirming_options"])
|> Repo.all() #=> [investment1]
Investment
|> where_state_in([:subscribing])
|> Repo.all() #=> []
Investment
|> where_state_in(not: [[:subscribing, :confirming_options]])
|> Repo.all() #=> [investment2]
Investment
|> where_state_in(not: [:subscribing])
|> Repo.all() #=> [investment1, investment2]
"""
def where_state_in(subject_query, not: states) do
subject_query
|> join_workflow_maybe()
|> where([workflow: workflow], workflow.state not in ^Enum.map(states, &to_state_id/1))
end
def where_state_in(subject_query, states) do
subject_query
|> join_workflow_maybe()
|> where([workflow: workflow], workflow.state in ^Enum.map(states, &to_state_id/1))
end
@doc """
where_any_state/2 takes a subject query and a state and filters based on workflows that are equal
to or in a child state of the given state. Nested states can be passed as a list of states
and will be converted to a valid state identifier in the query.
Examples:
investment #=> %Investment{workflow: %Workflow{state: "subscribing.confirming_options"}}
Investment
|> where_any_state(:subscribing)
|> Repo.all() #=> [investment]
Investment
|> where_any_state("subscribing")
|> Repo.all() #=> [investment]
Investment
|> where_any_state([:subscribing, :confirming_options])
|> Repo.all() #=> [investment]
Investment
|> where_any_state(:resubmitting)
|> Repo.all() #=> []
"""
def where_any_state(subject_query, state) do
state_id = to_state_id(state)
subject_query
|> join_workflow_maybe()
|> where(
[workflow: workflow],
workflow.state == ^state_id or ilike(workflow.state, ^"#{state_id}.%")
)
end
def where_step_complete(q, s) when is_atom(s),
do: where_step_complete(q, Atom.to_string(s))
def where_step_complete(subject_query, step_name) do
subject_query
|> join_workflow_maybe()
|> join_workflow_steps_maybe()
|> where([workflow_step: step], step.name == ^step_name and step.complete?)
end
def to_state_id(states) when is_list(states) do
Enum.map(states, &to_state_id/1) |> Enum.join(".")
end
def to_state_id(state) when is_atom(state), do: Atom.to_string(state)
def to_state_id(state) when is_bitstring(state), do: state
def to_state_list(state) when is_bitstring(state) do
String.split(state, ".") |> Enum.map(&String.to_atom/1)
end
def to_step_name(step) when is_atom(step), do: Atom.to_string(step)
def to_step_name(step) when is_bitstring(step), do: step
defp join_workflow_maybe(subject_query) do
if has_named_binding?(subject_query, :workflow) do
subject_query
else
join(subject_query, :inner, [sub], w in assoc(sub, :workflow), as: :workflow)
end
end
defp join_workflow_steps_maybe(subject_query) do
if has_named_binding?(subject_query, :workflow_step) do
subject_query
else
join(subject_query, :inner, [workflow: w], s in assoc(w, :steps), as: :workflow_step)
end
end
end
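# A hedged composition sketch (not from the source). Because
# join_workflow_maybe/1 checks has_named_binding?/2 before joining, the
# filters above can be chained without duplicating the :workflow join.
# `Investment` and `Repo` are borrowed from the moduledoc examples;
# :sign_documents is a hypothetical step name.
#
#     Investment
#     |> ExState.Ecto.Query.where_state([:subscribing, :confirming_options])
#     |> ExState.Ecto.Query.where_step_complete(:sign_documents)
#     |> Repo.all()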
|
lib/ex_state/ecto/query.ex
| 0.772659
| 0.655295
|
query.ex
|
starcoder
|
defmodule Monet.Query.Select do
@moduledoc """
A simple query builder.
rows = Select.new()
|> Select.columns("u.id, u.name")
|> Select.from("users u")
|> Select.join("roles r on u.role_id = r.id")
|> Select.where("u.power", :gt, 9000)
|> Select.limit(100)
|> Select.exec!() # returns a Monet.Result
|> Monet.rows()
"""
use Monet.Query.Where
defmacro __using__(_) do
quote do
use Monet.Query.Where
alias Monet.Query.Select
end
end
alias __MODULE__
@enforce_keys [:select, :from, :where, :order, :group, :limit, :offset]
defstruct @enforce_keys
def new() do
%Select{
from: [],
order: nil,
group: nil,
limit: nil,
offset: nil,
select: nil,
where: Where.new(),
}
end
@doc """
Columns to select. If not called, will select *.
Can be called multiple times, with either a list or a string.
This can really be anything; it's best to think of it as the text that
is placed between the `select` and the `from`.
"""
def columns(q, [first | columns]) do
columns = Enum.reduce(columns, [first], fn c, acc -> [acc, ", ", c] end)
append_columns(q, columns)
end
def columns(s, column), do: append_columns(s, column)
defp append_columns(%{select: nil} = s, columns), do: %Select{s | select: columns}
defp append_columns(s, columns), do: %Select{s | select: [s.select, ", ", columns]}
@doc """
Table to select from. Can be called multiple times. This essentially becomes
what gets placed between the "from" and the "where".
You could do:
Select.from(s, "users")
# OR
Select.from(s, "(select 1 from another) x")
"""
def from(s, from), do: %Select{s | from: [s.from, from]}
@doc """
Join tables. There's no magic here: the builder knows nothing
about your tables (i.e. you need to tell it what to join on):
Select.join(s, "table b on a.id = b.id")
This is just a shorthand for `from/2`, except that it injects the word
" [left|right|full]? join " for you.
"""
def join(s, table), do: %Select{s | from: [s.from, [" join ", table]]}
def join(s, :left, table), do: %Select{s | from: [s.from, [" left join ", table]]}
def join(s, :right, table), do: %Select{s | from: [s.from, [" right join ", table]]}
def join(s, :full, table), do: %Select{s | from: [s.from, [" full join ", table]]}
def order(s, order), do: append_order(s, order)
def order(s, order, true), do: append_order(s, order)
def order(s, order, false), do: append_order(s, [order, " desc"])
defp append_order(%{order: nil} = s, order), do: %Select{s | order: [order]}
defp append_order(%{order: existing} = s, order), do: %Select{s | order: [existing, ", ", order]}
def group(s, group), do: %Select{s | group: group}
def limit(s, limit) when is_integer(limit), do: %Select{s | limit: limit}
def offset(s, offset) when is_integer(offset), do: %Select{s | offset: offset}
def exec(s, pool \\ Monet) do
{sql, args} = to_sql(s)
Monet.query(pool, sql, args)
end
def exec!(s, pool \\ Monet) do
case exec(s, pool) do
{:ok, result} -> result
{:error, err} -> raise err
end
end
def to_sql(s) do
{where, args} = Where.to_sql(s.where)
sql = ["select ", s.select || "*", " from ", s.from, where]
sql = case s.group do
nil -> sql
group -> [sql, " group by ", group]
end
sql = case s.order do
nil -> sql
order -> [sql, " order by ", order]
end
sql = case s.limit do
nil -> sql
limit -> [sql, " limit ", Integer.to_string(limit)]
end
sql = case s.offset do
nil -> sql
offset -> [sql, " offset ", Integer.to_string(offset)]
end
{sql, args}
end
end
defimpl Inspect, for: Monet.Query.Select do
def inspect(q, opts) do
import Inspect.Algebra
{sql, values} = Monet.Query.Select.to_sql(q)
sql = :erlang.iolist_to_binary(sql)
# Split on the SQL keywords (captured with their surrounding spaces) so each clause starts on its own line.
sql = Regex.split(~r/ (from|join|where|group by|order by|limit|offset) /, sql, include_captures: true)
docs = fold_doc(sql, fn
<<" ", doc::binary>>, acc when doc in ["from ", "join ", "where ", "group by ", "order by ", "limit ", "offset "] -> concat([break("\n"), doc, acc])
doc, acc -> concat(doc, acc)
end)
concat [docs, break("\n"), to_doc(values, opts)]
end
end
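# A small usage sketch (assumptions noted): it assumes Where.to_sql/1 renders
# an empty where clause when no conditions were added, so the output shown is
# illustrative rather than guaranteed.
#
#     {sql, args} =
#       Select.new()
#       |> Select.columns("id, name")
#       |> Select.from("users")
#       |> Select.limit(10)
#       |> Select.to_sql()
#
#     :erlang.iolist_to_binary(sql)
#     #=> "select id, name from users limit 10"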
|
lib/query/select.ex
| 0.611266
| 0.521532
|
select.ex
|
starcoder
|
defmodule Snitch.Domain.Taxonomy do
@moduledoc """
Interface for handling Taxonomy. It provides functions to modify Taxonomy.
"""
use Snitch.Domain
use Snitch.Data.Model
import AsNestedSet.Modifiable
import AsNestedSet.Queriable, only: [dump_one: 2]
import Ecto.Query
alias Ecto.Multi
alias Snitch.Data.Model.Image, as: ImageModel
alias Snitch.Data.Schema.{Taxon, Taxonomy, Image, Product}
alias Snitch.Tools.Helper.Taxonomy, as: Helper
alias Snitch.Tools.Helper.ImageUploader
alias Snitch.Data.Model.Product, as: ProductModel
@doc """
Adds a child taxon to the left of, to the right of, or as a child of the parent taxon.
Position can take the following values:
Position - :left | :right | :child
"""
@spec add_taxon(Taxon.t(), Taxon.t(), atom) :: {:ok, Taxon.t()} | {:error, Ecto.Changeset.t()}
def add_taxon(%Taxon{} = parent, %Taxon{} = child, position) do
try do
taxon =
%Taxon{child | taxonomy_id: parent.taxonomy.id}
|> Repo.preload(:taxonomy)
|> create(parent, position)
|> AsNestedSet.execute(Repo)
{:ok, taxon}
rescue
error in Ecto.InvalidChangesetError ->
{:error, error.changeset}
end
end
@doc """
Checks if the taxon is a root taxon.
Note: if the taxon is not associated with a taxonomy, a RuntimeError will be raised.
"""
@spec is_root?(Taxon.t()) :: boolean()
def is_root?(%Taxon{} = taxon) do
taxon = Repo.preload(taxon, :taxonomy)
case taxon.taxonomy do
nil ->
raise "No taxonomy is associated with taxon"
_ ->
taxon.id == taxon.taxonomy.root_id
end
end
@doc """
Adds taxon as root to the taxonomy
"""
@spec add_root(Taxon.t()) :: Taxon.t()
def add_root(%Taxon{} = root) do
root
|> create(:root)
|> AsNestedSet.execute(Repo)
end
@doc """
Get the root of the taxonomy for the passed taxon
"""
@spec get_root(Taxon.t()) :: Taxon.t()
def get_root(%Taxon{} = taxon) do
Taxon
|> AsNestedSet.root(%{taxonomy_id: taxon.taxonomy_id})
|> AsNestedSet.execute(Repo)
end
@doc """
Traverses the taxonomy tree in order and returns a tuple of the root and the
list of traversed taxons
"""
@spec inorder_list(Taxon.t()) :: {Taxon.t(), [Taxon.t()]}
def inorder_list(%Taxon{} = root) do
Taxon
|> AsNestedSet.traverse(
%{taxonomy_id: root.taxonomy_id},
[],
fn node, acc -> {node, [node | acc]} end,
fn node, acc -> {node, acc} end
)
|> AsNestedSet.execute(Repo)
end
@doc """
Dumps the taxonomy in tuple form, as follows:
{%Taxon{name: "root"}, [
{%Taxon{name: "child1"}, []},
{%Taxon{name: "child2"}, []}
]}
"""
@spec dump_taxonomy(Taxon.t() | integer) :: {Taxon.t(), []}
def dump_taxonomy(%Taxon{} = taxon) do
dump_taxonomy(taxon.taxonomy_id)
end
def dump_taxonomy(id) do
Taxon
|> dump_one(%{taxonomy_id: id})
|> AsNestedSet.execute(Repo)
end
@doc """
Get all leaf Taxons for a Taxonomy
"""
def get_leaves(%Taxonomy{} = taxonomy) do
Taxon
|> AsNestedSet.leaves(%{taxonomy_id: taxonomy.id})
|> AsNestedSet.execute(Repo)
end
@doc """
Get taxonomy by name
"""
def get_taxonomy(name) do
Repo.get_by(Taxonomy, name: name)
end
def all_taxonomy, do: Repo.all(Taxonomy)
def get_default_taxonomy do
case all_taxonomy() |> List.first() do
%Taxonomy{} = taxonomy ->
{:ok, taxonomy}
nil ->
{:error, :not_found}
end
end
@doc """
Get taxonomy by id
"""
def get_taxonomy_by_id(id) do
Repo.get_by(Taxonomy, id: id)
end
def delete_taxonomy(id) do
try do
id
|> get_taxonomy_by_id
|> Repo.delete()
rescue
e in Ecto.ConstraintError -> {:error, e.message}
end
end
@spec get_all_taxonomy :: [map()]
def get_all_taxonomy do
Taxonomy
|> Repo.all()
|> Repo.preload(:root)
|> Enum.map(fn taxonomy -> %{taxonomy | taxons: dump_taxonomy(taxonomy.id)} end)
|> Enum.map(&Helper.convert_to_map/1)
end
@doc """
Gets all immediate children for a particular category
"""
@spec get_child_taxons(integer()) :: [Taxon.t()]
def get_child_taxons(taxon_id) do
case get_taxon(taxon_id) do
%Taxon{} = taxon ->
taxons =
taxon
|> AsNestedSet.children()
|> AsNestedSet.execute(Repo)
{:ok, taxons}
_ ->
{:error, :not_found}
end
end
@doc """
Gets all the taxons under a taxon tree
"""
@spec get_all_children_and_self(integer()) :: {:ok, [Taxon.t()]} | {:error, :not_found}
def get_all_children_and_self(taxon_id) do
case get_taxon(taxon_id) do
%Taxon{} = taxon ->
taxons =
taxon
|> AsNestedSet.self_and_descendants()
|> AsNestedSet.execute(Repo)
{:ok, taxons}
_ ->
{:error, :not_found}
end
end
@doc """
Gets all the ancestor taxons till the root level
"""
@spec get_ancestors(integer()) :: {:ok, [Taxon.t()]} | {:error, :not_found}
def get_ancestors(taxon_id) do
case Repo.get(Taxon, taxon_id) do
nil ->
{:error, :not_found}
taxon ->
ancestors =
taxon
|> AsNestedSet.ancestors()
|> AsNestedSet.execute(Repo)
{:ok, ancestors}
end
end
@doc """
Get taxon by id
"""
def get_taxon(id) do
Repo.get_by(Taxon, id: id)
|> Repo.preload([:image, :taxonomy, :variation_themes])
end
def get_taxon_by_name(name) do
Repo.get_by(Taxon, name: name)
end
def create_taxon(parent_taxon, %{image: nil} = taxon_params) do
taxon_struct = %Taxon{name: taxon_params.name}
with {:ok, taxon} <- add_taxon(parent_taxon, taxon_struct, :child) do
Taxon.update_changeset(
taxon,
Map.put(taxon_params, :variation_theme_ids, taxon_params.themes)
)
|> Repo.update()
end
end
def create_taxon(parent_taxon, %{image: image} = taxon_params) do
Multi.new()
|> Multi.run(:struct, fn _, _ ->
taxon_struct = %Taxon{name: taxon_params.name}
add_taxon(parent_taxon, taxon_struct, :child)
end)
|> Multi.run(:image, fn _, %{struct: struct} ->
params = %{"image" => Map.put(image, :url, ImageModel.image_url(image.filename, struct))}
QH.create(Image, params, Repo)
end)
|> Multi.run(:association, fn _, %{image: image, struct: struct} ->
params = Map.put(%{}, :taxon_image, %{image_id: image.id})
Taxon.update_changeset(
struct,
Map.put(params, :variation_theme_ids, taxon_params.themes)
)
|> Repo.update()
end)
|> ImageModel.upload_image_multi(taxon_params.image)
|> ImageModel.persist()
end
@doc """
Updates all category slugs based on their names.
Warning: this method should be used only when the slugs are not present.
Running it might change existing slugs if the tree above a category is modified.
"""
def update_all_categories_slug() do
Taxon
|> Repo.all()
|> Enum.map(&Taxon.changeset(&1, %{}))
|> Enum.map(&Repo.update(&1))
end
@doc """
Update the given taxon.
"""
def update_taxon(taxon, %{"image" => nil} = params) do
taxon |> Taxon.update_changeset(params) |> Repo.update()
end
def update_taxon(taxon, %{"image" => image} = params) do
ImageModel.update(Taxon, taxon, params, "taxon_image")
end
@doc """
Create a taxonomy with given name.
"""
def create_taxonomy(name) do
Multi.new()
|> Multi.run(:taxonomy, fn _, _ ->
%Taxonomy{name: name} |> Repo.insert()
end)
|> Multi.run(:root_taxon, fn _, %{taxonomy: taxonomy} ->
taxon = %Taxon{name: name, taxonomy_id: taxonomy.id} |> add_root
{:ok, taxon}
end)
|> Repo.transaction()
end
@doc """
Delete a taxon along with all the products associated with that taxon tree.
"""
def delete_taxon(taxon_id) do
case get_taxon(taxon_id) do
%Taxon{} = taxon ->
Multi.new()
|> Multi.run(:delete_products, fn _, _ -> ProductModel.delete_by_category(taxon) end)
|> Multi.run(:category, fn _, _ -> do_delete_taxon(taxon) end)
|> Repo.transaction()
nil ->
{:error, :not_found}
end
end
defp do_delete_taxon(%Taxon{} = taxon) do
taxon
|> AsNestedSet.delete()
|> AsNestedSet.execute(Snitch.Core.Tools.MultiTenancy.Repo)
{:ok, taxon}
end
end
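# A hedged end-to-end sketch based only on the functions above. The category
# names are illustrative, and it assumes the root taxon carries (or can
# preload) its :taxonomy association, which add_taxon/3 reads via
# parent.taxonomy.id.
#
#     alias Snitch.Domain.Taxonomy
#
#     {:ok, %{taxonomy: taxonomy, root_taxon: root}} = Taxonomy.create_taxonomy("Category")
#     {:ok, _clothing} = Taxonomy.add_taxon(root, %Taxon{name: "Clothing"}, :child)
#     Taxonomy.dump_taxonomy(taxonomy.id)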
|
apps/snitch_core/lib/core/domain/taxonomy/taxonomy.ex
| 0.700075
| 0.476214
|
taxonomy.ex
|
starcoder
|
defmodule TwitchApi.Client do
alias TwitchApi.Util.Params
@moduledoc """
Builds the client for the API wrapper
"""
@typedoc """
Credentials
"""
@type auth :: %{client_id: String.t(), client_secret: String.t()}
@typedoc """
The client struct
"""
@type t :: %__MODULE__{auth: auth | nil}
defstruct auth: nil
@doc """
If using application configuration, takes no arguments and returns a client
## Examples
```elixir
TwitchApi.Client.new()
=> %TwitchApi.Client{
auth: %{client_id: "ABC123", client_secret: "DEF456"}
}
```
"""
@spec new() :: t
def new do
auth = %{
client_id: Application.fetch_env!(:twitch_api, :client_id),
client_secret: Application.fetch_env!(:twitch_api, :client_secret)
}
new(auth)
end
@doc """
Explicitly takes a client id and secret and returns a `%TwitchApi.Client{}`.
This is useful if you have many services that require different
credentials, or if you are using elixir umbrellas.
## Examples
iex> TwitchApi.Client.new(%{client_id: "ABC123", client_secret: "DEF456"})
%TwitchApi.Client{
auth: %{client_id: "ABC123", client_secret: "DEF456"}
}
"""
@spec new(auth) :: t
def new(auth = %{client_id: id, client_secret: secret}) do
case {id, secret} do
{nil, _} ->
raise ArgumentError, message: ":client_id not set or is empty"
{_, nil} ->
raise ArgumentError, message: ":client_secret not set or is empty"
{"", _} ->
raise ArgumentError, message: ":client_id not set or is empty"
{_, ""} ->
raise ArgumentError, message: ":client_secret not set or is empty"
{_id, _secret} ->
%__MODULE__{auth: auth}
end
end
def get(client, path \\ "", headers \\ [], params \\ %{})
def get(client = %TwitchApi.Client{}, path, headers, params) do
url = Params.url_params_list_parser(path, params)
{:ok, token} = TwitchApi.TokenCache.get()
headers = [{"Client-ID", "#{client.auth.client_id}"} | headers]
headers = [{"Authorization", "Bearer #{token}"} | headers]
TwitchApi.get!(url, headers)
end
def post(client, path \\ "", body \\ "", headers \\ [])
def post(client = %TwitchApi.Client{}, path, body, headers) do
url = Params.url_params_list_parser(path, %{})
# {:ok, token} = TwitchApi.TokenCache.get()
headers = [{"Client-ID", "#{client.auth.client_id}"} | headers]
# headers = [{"Authorization", "Bearer #{token}"} | headers]
TwitchApi.post!(url, body, headers)
end
end
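# A minimal sketch (not from the source) of the construction and request path
# above; the credentials and query parameters are placeholders.
#
#     client = TwitchApi.Client.new(%{client_id: "ABC123", client_secret: "DEF456"})
#     TwitchApi.Client.get(client, "/users", [], %{login: "some_channel"})
#
#     # Missing or empty credentials raise at construction time:
#     TwitchApi.Client.new(%{client_id: "", client_secret: "DEF456"})
#     #=> ** (ArgumentError) :client_id not set or is empty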
|
lib/client.ex
| 0.874527
| 0.448487
|
client.ex
|
starcoder
|
defmodule Quadquizaminos.Instructions do
def game_instruction() do
"""
<h2>Explore Cyber Security's Most Important Emerging Strategic Focus with the Digital Era's Most Beloved Game</h2>
<h2>Power Up!</h2>
<p>You don't need to be a supply chain expert - this game assumes you are a novice
and it teaches you what you need to know. You gain points by clearing rows as in a typical tetrominoes game.
Here's the twist - you can answer quiz questions about supply chain both to gain points and to gain powerups.</p>
<p>Powerups help with classic tetrominoes strategy but also are needed
to combat:</p>
<ul>
<li>vulnerabilities (unfortunate gaps in an otherwise functioning security paradigm)</li>
<li>licensing issues (extraneous blocks that gunk up the works)</li>
<li>cyber attacks (rapid changes in operating conditions that take over entire sections
of the gameboard and speed the game up uncontrollably)</li>
<li>licensing lawsuits (tiresome procedures that gum up entire sections of the game board
and slow the game to a snail's pace)</li>
</ul>
<p>Just like in real life, information powerups can prevent attacks
and address emerging vulnerabilities as the operational environment speeds up and slows down around us.
Play the game and find out!
</p>
<h2>How to play</h2>
<p>The game involves completing rows for points or answering questions for points.
There are many different aspects covered in the following sections:
<ol>
<li>Objective</li>
<li>Movement</li>
<li>Speed</li>
<li>Scoring</li>
<li>Vulnerabilities, Licensing Issues, Cyberattacks, Lawsuits</li>
<li>Quiz</li>
<li>Sponsors</li>
<li>Powerups</li>
<li>Prizes / Contests</li>
<li>Strategy, Tips & Tricks</li>
</ol></p>
<h3>Objective</h3>
<p>The objective of the game is to move and rotate falling geometric shapes
(quadblocks)
to form complete rows at the bottom of the game board,
gaining points in process.
Answering questions also adds points and sometimes gives the player
"powerups" which help with playing the game.
</p>
<p>
Of course, the real objective is teach you about supply chain security
in a fun and entertaining manner.
</p>
<p>
The game ends when you go out of business because
"your supply chain gets too long"
and something goes wrong in your supply chain
i.e. the stack of blocks reaches the top of the game board.
</p>
<h3>Movement</h3>
<ol>
<li>Up arrow key rotates the blocks</li>
<li>Left arrow key moves the blocks to the left</li>
<li>Right arrow key moves the blocks to the right</li>
<li>Down arrow key moves the blocks down</li>
<li>space bar pauses game and brings up quiz</li>
</ol>
<h3>Speed</h3>
<p>Speed refers to how fast the quadblocks fall.
There are 7 speeds to the game:
<ul>
<li>"full throttle" - ~20 rows per second</li>
<li>"high speed" - ~10 rows per second</li>
<li>"fast" - ~4 rows per second</li>
<li>"moderate" - ~3 rows per second</li>
<li>"leisurely" - ~2 rows per second</li>
<li>"sedate" - ~1.5 rows per second</li>
<li>"lethargic" - ~1 rows per second</li>
</ol>
</p>
<p>
Speed affects scoring in several ways which will be described under scoring
</p>
<p>
The game starts at a "fast" speed.
The game speeds up gradually (i.e. one step in the above list) over time (#?).
</p>
<p>If a "Cyberattack" (see section whatever) occurs,
the game jumps to "full throttle" from whatever speed it was on (#?).
</p>
<p>If a "License Lawsuit" (see section whatever) occurs,
the game jumps to "lethargic" from whatever speed it was on(#?).
</p>
<p>Powerups (see later section) can be used to:
<ol>
<li>speed up one notch (#?)</li>
<li>slow down one notch (#?)</li>
</ol>
<h3>Scoring</h3>
<p>Points are scored in several ways:
<ol>
<li>clock ticks / block drops</li>
<li>clearing rows</li>
<li>answering questions</li>
</ol>
The amount of points scored is also influenced by multipliers.
The multipliers may increase the score
(e.g. operating at quicker speeds multiplies the value of clearing a row)(#?)
or may decrease the score (e.g. the more vulnerabilities on the board,
the lower the value of clearing a row)(#?)
</p>
<p>
For each clock tick, the block in play drops one row and the score changes.
How much it increases/decreases depends on the speed.
Obviously, faster speeds make clock ticks come more often.
But in addition, each tick is worth more at faster speeds(#?):
<ul>
<li>"full throttle": 4 points per tick</li>
<li>"high speed": 3 points per tick</li>
<li>"fast": 2 points per tick</li>
<li>"moderate": 1 point per tick</li>
<li>"leisurely": 0 points per tick (ie score does not increase)</li>
<li>"sedate": (-1) point per tick(ie score actually decreases)</li>
<li>"lethargic": (-5) points per tick(ie score actually decreases)</li>
</ol>
</p>
<p>
Clearing rows gains points as well as frees up space to play longer.
The amount of points for clearing a row depends on
(1) the number of rows cleared in that tick
(2) the multipliers.
</p>
<p>
It is possible to fill more than one row
when a quadblock reaches the "bottom".
The amount of points goes up exponentially
with the number of rows filled.
If no multipliers are in effect, then the points are
100 times 2 to the power of the number of rows, e.g.:
<ol>
<li>1 row = 200 points</li>
<li>2 rows = 400 points</li>
<li>3 rows = 800 points</li>
<li>4 rows = 1,600 points</li>
<li>etc</li>
</ol>
It might appear that the highest number of rows that could be cleared
in one tick would be 4 - the length of the longest quadblock.
However, there are some tips & tricks with powerups (see later section):
the clock is 'frozen' while you are doing add/delete/move/etc,
and, more importantly, rows completed with add/move do not score
until the falling brick joins the bottom blocks.
</p>
<p>
put 5 row example gif here
</p>
<p>
Which means you could, in theory at least,
complete almost as many rows as there are on the board.
10 rows would be 102,400 points.
15 rows would be 3,276,800 points.
And that is before multipliers.
</p>
<p>
Multipliers increase the score.
One multiplier is the speed at the tick that clears the row:
<ul>
<li>"full throttle": Multiplier = 4</li>
<li>"high speed": Multiplier = 3</li>
<li>"fast": Multiplier = 2</li>
<li>"moderate": Multiplier = 1</li>
<li>"leisurely": Multiplier = 1</li>
<li>"sedate": Multiplier = 1</li>
<li>"lethargic": Multiplier = 0.5 (ie less points)</li>
</ol>
</p>
<p>
Other multipliers are TBD
</p>
<p>
The third way to score points is by answering questions.
Each question gives a certain amount of points for a correct answer.
It also subtracts for incorrect answers,
but the scoring is setup such that even if you don't get it correct
until the last guess, you will still be ahead
albeit not as far ahead as getting it correct on first try.
</p>
<p>
Points per question increase as you get further into questions
in a given category.
</p>
<p>
Answering questions also adds powerups for some questions.
See later section.
</p>
<h3>Vulnerabilities, Licensing Issues, Cyberattacks, Lawsuits</h3>
<p>
Normal blocks are squares of one color (two tones).
But just like in real life, problems can crop up in your supply chain.
There are two types of vulnerabilities.
Known vulnerabilities are yellow/gray blocks
that show up at random either in dropping blocks
or in the blocks at the bottom that haven't been cleared yet.
Invisible vulnerabilities (zero days) are white-on-white blocks
that you only notice through behavior (#?).
</p>
<p>
Vulnerabilities have several impacts.
The main impact is that any vulnerability in a row will prevent
it being cleared (#104).
There is also a subtle impact that the more vulnerabilities,
the more likely you will be hit with a cyber attack(#?).
The speed at which vulnerabilities arrive is a function of time(#?),
a function of the number of wrong answers to questions(#?),
and is slowed down by enabling certain powerups(#?).
</p>
<p>
License errors are similar.
They are brown/grey blocks (#?),
and also prevent a row from being cleared.
The more license errors,
the higher the likelihood of a lawsuit(#?).
The speed at which license errors arrive is a function of time (#?),
a function of the number of wrong answers to questions(#?),
and is slowed down by enabling certain powerups(#?).
</p>
<p>
Cyberattacks occur from ignoring vulnerabilities
in your supply chain(#?).
Cyberattacks speed the game up to the highest speed(#?),
and cause an entire blocking row of IOCs (grey/yellow)
at row 10 (#?).
Cyberattacks are cleared with powerups.
</p>
<p>
Lawsuits occur from ignoring license errors
in your supply chain(#?).
Lawsuits slow the game down to the lowest speed(#?),
and cause lawsuit blockers (grey/brown)
in rows 5-15
at column 5(#?).
Lawsuits are cleared with powerups.
</p>
<h3>Quiz</h3>
<ol>
<li>answer questions to gain points</li>
<li>answer questions to gain powerups</li>
<li>different categories of questions</li>
<li>points/question increase deeper into a category</li>
<li>"more powerful" powerups deeper into a category</li>
<li>powerups 'relate' to a category (eg SBOM "blow up", OpenC2 'commands', Phoenix 'rebirth', ...)</li>
<li> ... </li>
</ol>
<h3>Sponsors</h3>
<p>Sponsors make the game possible.</p>
<p>Sponsors offer prizes (see section)</p>
<p>
The Sponsor category is the only quiz category
where you can get the 'superpower' powerup,
which allows you to pick any other powerup when you need it.
</p>
<p>
Please visit the sponsor quiz questions.
</p>
<p>
To be a sponsor required donating resources to the game
(dollars or sweat equity) but it also required
truthfully answering questions about their own supply chain.
This included, in most cases, confessing ignorance to
certain aspects of supply chain learned by playing this game.
</p>
<h3>Powerups</h3>
<p>
Powerups are won by correctly answering certain questions.
Powerups appear on the categories bar when paused.
</p>
<p>
add pic here
</p>
<p>
<ul>
<li><i class="fas fa-plus-square"></i> add a block - useful to fill in holes</li>
<li><i class="fas fa-minus-square"></i> remove a block - useful to remove problem blocks</li>
<li><i class="fas fa-arrows-alt"></i> move a block - helpful both to get a block 'out of the way' and to fill in hole</li>
<li><i class="fas fa-eraser"></i> clear blocks - use in attacked or sued, helpful if supply chain gets too long</li>
<li><i class="fas fa-fast-forward"></i> speed up - gets you more points on row clearing, needed if lawsuit is slowing your business</li>
<li><i class="fas fa-fast-backward"></i> slow down - necesary if attacked, useful if game is going too fast</li>
<li><i class="fas fa-wrench"></i> fix a vulnerability</li>
<li><i class="fas fa-screwdriver"></i> fix a licensing issue</li>
<li>above here works, below here will work soon</li>
<li><i class="fas fa-hammer"></i> remove all vulnerabilities</li>
<li><i class="fas fa-tape"></i> remove all licensing issues</li>
<li><i class="fab fa-superpowers"></i> Superpower - exchange for any other powerup</li>
<li><i class="fas fa-crosshairs"></i> pick next quadblock to fall - useful when you need a certain block to clear rows</li>
<li><i class="fas fa-tools"></i> OpenChain - slows down slows down license issues</li>
<li><i class="fas fa-microscope"></i> forensics - helps prepare for attacks</li>
<li><i class="fas fa-gavel"></i> legal is trained and ready on cyber - which slows down licensing issues</li>
<li><i class="fas fa-file-contract"></i> Cyber Insurance - lessens the removal of points due to attacks and lawsuits</li>
<li><i class="far fa-id-card"></i> SBOM - slows down vulnerability creation, slows down license issues</li>
<li><i class="fas fa-toolbox"></i> automation - slows down vulnerability creation, slows down license issues</li>
</ul>
</p>
<h3>Prizes, Contests</h3>
There will be a contest on date at time.
You must be an RSAC registered attendee to win.
Any given person will only be entitled to one prize.
Winners will select from among the available prizes until
there are none left.
Prizes include:
- Win a private Cocktail Session via Zoom with Mixologist/Sommelier <NAME>eng. This will include an in depth demo of three recipes catered to your favorite novel or other source of group inspiration. Chantal will provide advance recipes and a tool list for your unlimited number of guests who are then encouraged to make the drinks alongside and ask questions along the way.
- Win a private Absinthe tutorial with Mixologist/Sommelier <NAME>. You and your unlimited number of guests will schedule a time on Zoom where Chantal will teach a condensed class covering the history and myths of Absinthe, proper service and its usage in cocktails
- sFractal consulting (5 hours)
- Podii software development (5 hours)
- Briar Cutter technical writing (5 hours)
- Win your very own custom cocktail recipe designed by Mixologist/Sommelier <NAME>eng based on your home bar inventory and literary tastes
<h3>Strategy, Tips & Tricks</h3>
<p>tetrominoes vs quiz</p>
<p>
timing funny wrt QnA/powerups
</p>
<p>
multi-row clearing trick
</p>
<p>
just like in real life, it is sometimes expedient
to defer patches due to more immediate revenue needs,
sometimes leaving a vuln or lic-issue in place lets you build rows
that can be cleared in a larger block
- but be careful since it also increases your likelihood
of a cyberattack or lawsuit.
It's not an easy tradeoff.
The game is not tilted towards fixing - you will get a lower
score if you spend all your time fixing.
Conversely you will likely go out of business due to
a cyberattack or lawsuit if you ignore them entirely.
The best strategy is trading off between the two,
and investing in areas that reduce the likelihood
(e.g. SBOM, Automation, OpenChain) of them occurring in the first place.
</p>
<p>
Note slowing down vuln/lic means it can still happen,
just less likely.
</p>
<p>
The richer you are (ie the higher your score),
the more tempting target you are for cyberattack
and lawsuits (#?) so the more you need your ducks in a row.
</p>
"""
end
end
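# A hedged sketch (not part of the game code) of the row-clearing arithmetic
# described in the instructions above: 100 * 2^rows, scaled by the speed
# multiplier. The module name is hypothetical.
defmodule Quadquizaminos.ScoringSketch do
  @moduledoc false

  # With no multiplier: 1 row -> 200, 2 -> 400, 3 -> 800, 4 -> 1600.
  def row_points(rows, multiplier \\ 1) do
    round(100 * :math.pow(2, rows) * multiplier)
  end
end
# Quadquizaminos.ScoringSketch.row_points(4, 4) #=> 6400, i.e. 4 rows at "full throttle"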
|
lib/quadquizaminos/instructions.ex
| 0.551332
| 0.746116
|
instructions.ex
|
starcoder
|