| code | path | quality_prob | learning_prob | filename | kind |
|---|---|---|---|---|---|
defmodule Want.List do
@moduledoc """
Manages conversions to and from lists.
"""
use Want.Type
@type element :: any()
@type result :: {:ok, list(element())} | {:error, binary()}
@default_separator ","
@doc """
Cast an input into a list. By default this function will simply break up the input into list elements, but
further casting and validation of elements can be performed by providing an `element` option. The separator
used to split the list defaults to the comma character and this can be controlled using the `separator` option.
## Options
* `:separator` - Determines the character(s) used to separate list items. Defaults to the comma character.
* `:element` - Provides the ability to further control how list elements are cast and validated. Similar to the
`map` and `keywords` functions, accepts a keyword list with its own `:type` field and validation options.
## Examples
iex> Want.List.cast("1")
{:ok, ["1"]}
iex> Want.List.cast("1", element: [type: :integer])
{:ok, [1]}
iex> Want.List.cast("1,2,3,4", element: [type: :integer])
{:ok, [1, 2, 3, 4]}
iex> Want.List.cast("1:2:3:4", separator: ":", element: [type: :integer])
{:ok, [1, 2, 3, 4]}
iex> Want.List.cast("hello:world", separator: ":", element: [type: :enum, valid: [:hello, :world]])
{:ok, [:hello, :world]}
iex> Want.List.cast("hello:world", separator: ":", element: [type: :enum, valid: [:hello]])
{:ok, [:hello]}
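Elements that fail the `element` cast are dropped rather than returned as errors (illustrative):
iex> Want.List.cast("1,2,three", element: [type: :integer])
{:ok, [1, 2]}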
"""
@spec cast(value :: any(), opts :: Keyword.t()) :: result()
def cast(value),
do: cast(value, [])
def cast(value, opts) when is_binary(value) do
value
|> String.split(Keyword.get(opts, :separator, @default_separator))
|> cast(opts)
end
def cast(value, opts) when is_list(value) do
case {opts[:element], Keyword.get(Keyword.get(opts, :element, []), :type)} do
{nil, _} ->
{:ok, value}
{o, :enum} ->
{:ok, cast_elements(value, Want.Enum, o)}
{o, :integer} ->
{:ok, cast_elements(value, Want.Integer, o)}
{o, :atom} ->
{:ok, cast_elements(value, Want.Atom, o)}
{o, :float} ->
{:ok, cast_elements(value, Want.Float, o)}
{o, :sort} ->
{:ok, cast_elements(value, Want.Sort, o)}
{o, :string} ->
{:ok, cast_elements(value, Want.String, o)}
{o, :list} ->
{:ok, cast_elements(value, Want.List, o)}
{o, :map} ->
{:ok, cast_elements(value, Want.Map, o)}
{o, :keywords} ->
{:ok, cast_elements(value, Want.Keyword, o)}
end
end
def cast(value, _),
do: {:error, "Failed to convert value #{inspect value} to list."}
# Casts each list element using the given Want module, dropping elements that fail to cast.
defp cast_elements(list, mod, opts) do
list
|> Enum.reduce([], fn(elem, out) ->
case mod.cast(elem, opts) do
{:ok, elem} ->
[elem | out]
{:error, _reason} ->
out
end
end)
|> Enum.reverse()
end
end
| lib/want/list.ex | 0.909048 | 0.692174 | list.ex | starcoder |
defmodule ParallelStream do
alias ParallelStream.Mapper
alias ParallelStream.Each
alias ParallelStream.Filter
@moduledoc ~S"""
Parallel stream implementation for Elixir.
"""
@doc """
Creates a stream that will apply the given function to the enumerable in
parallel and return the function's return value.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Map and duplicate the numbers:
iex> parallel_stream = 1..5 |> ParallelStream.map(fn i -> i * 2 end)
iex> parallel_stream |> Enum.to_list
[2, 4, 6, 8, 10]
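The worker count can be tuned via the options documented above (illustrative):
iex> 1..5 |> ParallelStream.map(fn i -> i * 2 end, num_workers: 2) |> Enum.to_list
[2, 4, 6, 8, 10]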
"""
def map(stream, mapper, options \\ []) do
Mapper.map(stream, mapper, options)
end
@doc """
Creates a stream that will apply the given function to the enumerable in
parallel. The function's return value will be thrown away, hence this is
useful for producing side effects.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Iterate and write the numbers to stdout:
iex> parallel_stream = 1..5 |> ParallelStream.each(&IO.write/1)
iex> parallel_stream |> Stream.run
:ok # 12345 appears on stdout
"""
def each(stream, iter, options \\ []) do
Each.each(stream, iter, options)
end
@doc """
Creates a stream that will apply the given function to the enumerable in
parallel and only pass the values for which the function returns truthy
downstream.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Map and filter the even numbers:
iex> parallel_stream = 1..5 |> ParallelStream.filter(fn i -> i |> rem(2) == 0 end)
iex> parallel_stream |> Enum.to_list
[2, 4]
"""
def filter(stream, filter, options \\ []) do
Filter.filter(stream, filter, options)
end
@doc """
Creates a stream that will apply the given function to the enumerable in
parallel and only pass the values for which the function returns falsy
downstream.
## Options
These are the options:
* `:num_workers` – The number of parallel operations to run when running the stream.
* `:worker_work_ratio` – The available work per worker, defaults to 5. Higher rates will mean more work sharing, but might also lead to work fragmentation slowing down the queues.
## Examples
Map and reject the even numbers:
iex> parallel_stream = 1..5 |> ParallelStream.reject(fn i -> i |> rem(2) == 0 end)
iex> parallel_stream |> Enum.to_list
[1, 3, 5]
"""
def reject(stream, filter, options \\ []) do
Filter.reject(stream, filter, options)
end
end
| lib/parallel_stream.ex | 0.914444 | 0.670283 | parallel_stream.ex | starcoder |
defmodule AWS.ACM do
@moduledoc """
AWS Certificate Manager
Welcome to the AWS Certificate Manager (ACM) API documentation.
You can use ACM to manage SSL/TLS certificates for your AWS-based websites
and applications. For general information about using ACM, see the [ *AWS
Certificate Manager User Guide*
](https://docs.aws.amazon.com/acm/latest/userguide/).
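## Usage
Each function in this module takes an `AWS.Client` struct as its first
argument. A minimal usage sketch (the struct fields shown here are
assumptions based on this module's private `request/4`, `build_host/2` and
`build_url/2` helpers, which read `:region`, `:endpoint`, `:proto` and
`:port`):
    client = %AWS.Client{
      access_key_id: "AKIA...",
      secret_access_key: "...",
      region: "us-east-1",
      endpoint: "amazonaws.com",
      proto: "https",
      port: 443
    }
    {:ok, result, _response} = AWS.ACM.list_certificates(client, %{})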
"""
@doc """
Adds one or more tags to an ACM certificate. Tags are labels that you can
use to identify and organize your AWS resources. Each tag consists of a
`key` and an optional `value`. You specify the certificate on input by its
Amazon Resource Name (ARN). You specify the tag by using a key-value pair.
You can apply a tag to just one certificate if you want to identify a
specific characteristic of that certificate, or you can apply the same tag
to multiple certificates if you want to filter for a common relationship
among those certificates. Similarly, you can apply the same tag to multiple
resources if you want to specify a relationship among those resources. For
example, you can add the same tag to an ACM certificate and an Elastic Load
Balancing load balancer to indicate that they are both used by the same
website. For more information, see [Tagging ACM
certificates](https://docs.aws.amazon.com/acm/latest/userguide/tags.html).
To remove one or more tags, use the `RemoveTagsFromCertificate` action. To
view all of the tags that have been applied to the certificate, use the
`ListTagsForCertificate` action.
"""
def add_tags_to_certificate(client, input, options \\ []) do
request(client, "AddTagsToCertificate", input, options)
end
@doc """
Deletes a certificate and its associated private key. If this action
succeeds, the certificate no longer appears in the list that can be
displayed by calling the `ListCertificates` action or be retrieved by
calling the `GetCertificate` action. The certificate will not be available
for use by AWS services integrated with ACM.
Note: You cannot delete an ACM certificate that is being used by another
AWS service. To delete a certificate that is in use, the certificate
association must first be removed.
"""
def delete_certificate(client, input, options \\ []) do
request(client, "DeleteCertificate", input, options)
end
@doc """
Returns detailed metadata about the specified ACM certificate.
"""
def describe_certificate(client, input, options \\ []) do
request(client, "DescribeCertificate", input, options)
end
@doc """
Exports a private certificate issued by a private certificate authority
(CA) for use anywhere. The exported file contains the certificate, the
certificate chain, and the encrypted private 2048-bit RSA key associated
with the public key that is embedded in the certificate. For security, you
must assign a passphrase for the private key when exporting it.
For information about exporting and formatting a certificate using the ACM
console or CLI, see [Export a Private
Certificate](https://docs.aws.amazon.com/acm/latest/userguide/gs-acm-export-private.html).
"""
def export_certificate(client, input, options \\ []) do
request(client, "ExportCertificate", input, options)
end
@doc """
Retrieves an Amazon-issued certificate and its certificate chain. The chain
consists of the certificate of the issuing CA and the intermediate
certificates of any other subordinate CAs. All of the certificates are
base64 encoded. You can use
[OpenSSL](https://wiki.openssl.org/index.php/Command_Line_Utilities) to
decode the certificates and inspect individual fields.
"""
def get_certificate(client, input, options \\ []) do
request(client, "GetCertificate", input, options)
end
@doc """
Imports a certificate into AWS Certificate Manager (ACM) to use with
services that are integrated with ACM. Note that [integrated
services](https://docs.aws.amazon.com/acm/latest/userguide/acm-services.html)
allow only certificate types and keys they support to be associated with
their resources. Further, their support differs depending on whether the
certificate is imported into IAM or into ACM. For more information, see the
documentation for each service. For more information about importing
certificates into ACM, see [Importing
Certificates](https://docs.aws.amazon.com/acm/latest/userguide/import-certificate.html)
in the *AWS Certificate Manager User Guide*.
Note: ACM does not provide [managed
renewal](https://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html)
for certificates that you import.
Note the following guidelines when importing third-party certificates:
* You must enter the private key that matches the certificate you are importing.
* The private key must be unencrypted. You cannot import a private key that is protected by a password or a passphrase.
* If the certificate you are importing is not self-signed, you must enter its certificate chain.
* If a certificate chain is included, the issuer must be the subject of one of the certificates in the chain.
* The certificate, private key, and certificate chain must be PEM-encoded.
* The current time must be between the `Not Before` and `Not After` certificate fields.
* The `Issuer` field must not be empty.
* The OCSP authority URL, if present, must not exceed 1000 characters.
* To import a new certificate, omit the `CertificateArn` argument. Include this argument only when you want to replace a previously imported certificate.
* When you import a certificate by using the CLI, you must specify the certificate, the certificate chain, and the private key by their file names preceded by `file://`. For example, you can specify a certificate saved in the `C:\temp` folder as `file://C:\temp\certificate_to_import.pem`. If you are making an HTTP or HTTPS Query request, include these arguments as BLOBs.
* When you import a certificate by using an SDK, you must specify the certificate, the certificate chain, and the private key files in the manner required by the programming language you're using.
* The cryptographic algorithm of an imported certificate must match the algorithm of the signing CA. For example, if the signing CA key type is RSA, then the certificate key type must also be RSA.
This operation returns the [Amazon Resource Name
(ARN)](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)
of the imported certificate.
"""
def import_certificate(client, input, options \\ []) do
request(client, "ImportCertificate", input, options)
end
@doc """
Retrieves a list of certificate ARNs and domain names. You can request that
only certificates that match a specific status be listed. You can also
filter by specific attributes of the certificate. Default filtering returns
only `RSA_2048` certificates. For more information, see `Filters`.
"""
def list_certificates(client, input, options \\ []) do
request(client, "ListCertificates", input, options)
end
@doc """
Lists the tags that have been applied to the ACM certificate. Use the
certificate's Amazon Resource Name (ARN) to specify the certificate. To add
a tag to an ACM certificate, use the `AddTagsToCertificate` action. To
delete a tag, use the `RemoveTagsFromCertificate` action.
"""
def list_tags_for_certificate(client, input, options \\ []) do
request(client, "ListTagsForCertificate", input, options)
end
@doc """
Remove one or more tags from an ACM certificate. A tag consists of a
key-value pair. If you do not specify the value portion of the tag when
calling this function, the tag will be removed regardless of value. If you
specify a value, the tag is removed only if it is associated with the
specified value.
To add tags to a certificate, use the `AddTagsToCertificate` action. To
view all of the tags that have been applied to a specific ACM certificate,
use the `ListTagsForCertificate` action.
"""
def remove_tags_from_certificate(client, input, options \\ []) do
request(client, "RemoveTagsFromCertificate", input, options)
end
@doc """
Renews an eligible ACM certificate. At this time, only exported private
certificates can be renewed with this operation. In order to renew your ACM
PCA certificates with ACM, you must first [grant the ACM service principal
permission to do
so](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaPermissions.html).
For more information, see [Testing Managed
Renewal](https://docs.aws.amazon.com/acm/latest/userguide/manual-renewal.html)
in the ACM User Guide.
"""
def renew_certificate(client, input, options \\ []) do
request(client, "RenewCertificate", input, options)
end
@doc """
Requests an ACM certificate for use with other AWS services. To request an
ACM certificate, you must specify a fully qualified domain name (FQDN) in
the `DomainName` parameter. You can also specify additional FQDNs in the
`SubjectAlternativeNames` parameter.
If you are requesting a private certificate, domain validation is not
required. If you are requesting a public certificate, each domain name that
you specify must be validated to verify that you own or control the domain.
You can use [DNS
validation](https://docs.aws.amazon.com/acm/latest/userguide/gs-acm-validate-dns.html)
or [email
validation](https://docs.aws.amazon.com/acm/latest/userguide/gs-acm-validate-email.html).
We recommend that you use DNS validation. ACM issues public certificates
after receiving approval from the domain owner.
"""
def request_certificate(client, input, options \\ []) do
request(client, "RequestCertificate", input, options)
end
@doc """
Resends the email that requests domain ownership validation. The domain
owner or an authorized representative must approve the ACM certificate
before it can be issued. The certificate can be approved by clicking a link
in the mail to navigate to the Amazon certificate approval website and then
clicking **I Approve**. However, the validation email can be blocked by
spam filters. Therefore, if you do not receive the original mail, you can
request that the mail be resent within 72 hours of requesting the ACM
certificate. If more than 72 hours have elapsed since your original request
or since your last attempt to resend validation mail, you must request a
new certificate. For more information about setting up your contact email
addresses, see [Configure Email for your
Domain](https://docs.aws.amazon.com/acm/latest/userguide/setup-email.html).
"""
def resend_validation_email(client, input, options \\ []) do
request(client, "ResendValidationEmail", input, options)
end
@doc """
Updates a certificate. Currently, you can use this function to specify
whether to opt in to or out of recording your certificate in a certificate
transparency log. For more information, see [ Opting Out of Certificate
Transparency
Logging](https://docs.aws.amazon.com/acm/latest/userguide/acm-bestpractices.html#best-practices-transparency).
"""
def update_certificate_options(client, input, options \\ []) do
request(client, "UpdateCertificateOptions", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "acm"}
host = build_host("acm", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "CertificateManager.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
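# Builds the request URL from the host and the client's protocol and port.
# Illustrative: build_url("acm.us-east-1.amazonaws.com", %{proto: "https", port: 443})
# yields "https://acm.us-east-1.amazonaws.com:443/".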
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
| lib/aws/acm.ex | 0.870377 | 0.673178 | acm.ex | starcoder |
defmodule Txbox do
@moduledoc """
Txbox is a Bitcoin transaction storage schema. It lets you store Bitcoin
transactions in your application's database with searchable and filterable
semantic metadata. Txbox is inspired by [TXT](https://txt.network/) but
adapted to slot into an Elixir developer's toolset.
* Built on Ecto! Store Bitcoin Transactions in your database and define associations with any other data from your app's domain.
* Built in queue for pushing signed transactions to the Bitcoin network via the [Miner API](https://github.com/bitcoin-sv/merchantapi-reference).
* Auto-syncs with the [Miner API](https://github.com/bitcoin-sv/merchantapi-reference) of your choice, and caches signed responses.
* Aims to be compatible with TXT, with similar schema design and API for searching and filtering.
* Unlike TXT, no web UI or HTTP API is exposed. Txbox is purely a database schema with query functions - the rest is up to you.
* Coming soon (™) - Seamlessly import and export from other TXT-compatible platforms.
## Installation
The package can be installed by adding `txbox` to your list of dependencies in
`mix.exs`.
def deps do
[
{:txbox, "~> 0.3"}
]
end
Once installed, update your application's configuration, making sure Txbox
knows which Repo to use.
```elixir
# config/config.exs
config :txbox, repo: MyApp.Repo
```
Next, add `Txbox` to your application's supervision tree.
children = [
{Txbox, [
# Manic miner configuration (defaults to :taal)
miner: {:taal, headers: [{"token", "MYTOKEN"}]},
# Maximum number of times to attempt polling the miner (default is 20)
max_status_attempts: 20,
# Interval (in seconds) between each mAPI request (default is 300 - 5 minutes)
retry_status_after: 300
]}
]
Supervisor.start_link(children, strategy: :one_for_one)
Finally, run the following tasks to generate and run the required database
migrations.
```console
mix txbox.gen.migrations
mix ecto.migrate
```
## Upgrading
If upgrading from a previous version of `txbox`, make sure to run the migrations
task to check if any new migrations are required.
```console
mix txbox.gen.migrations
# If needed
mix ecto.migrate
```
## Usage
Once up and running, using Txbox is simple. The `Txbox` module provides four
CRUD-like functions for managing transactions: `create/2`, `update/2`,
`find/2` and `all/2`.
To add a transaction to Txbox, the minimum required is to give a `txid`.
iex> Txbox.create(%{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110"
...> })
{:ok, %Tx{}}
When a channel name is omitted, transactions are added to the `default_channel/0`
(`"txbox"`), but by specifying a channel name as the first argument, the
transaction will be added to that channel. You can provide additional metadata
about the transaction, as well as attach the raw transaction binary.
iex> Txbox.create("photos", %{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
...> rawtx: <<...>>,
...> tags: ["hubble", "universe"],
...> meta: %{
...> title: "Hubble Ultra-Deep Field"
...> },
...> data: %{
...> bitfs: "https://x.bitfs.network/6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110.out.0.3"
...> }
...> })
{:ok, %Tx{}}
The transaction can be retrieved by the `txid`.
iex> Txbox.find("6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
{:ok, %Tx{}}
As before, omitting the channel scopes the query to the `default_channel/0`
(`"txbox"`). Alternatively you can pass the channel name as the first argument,
or use `"_"` which is the TXT syntax for global scope.
iex> Txbox.find("_", "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
{:ok, %Tx{}}
A list of transactions can be returned using `all/2`. The second parameter
must be a `t:map/0` of query parameters to filter and search by.
iex> Txbox.all("photos", %{
...> from: 636400,
...> tagged: "hubble",
...> limit: 5
...> })
{:ok, [%Tx{}, ...]}
A full text search can be made by using the `:search` filter parameter.
iex> Txbox.all("_", %{
...> search: "hubble deep field"
...> })
{:ok, [%Tx{}, ...]}
### Filtering and searching
Txbox adopts the same syntax and query modifiers [used by TXT](https://txt.network/#/?id=c-queries).
Txbox automatically normalizes the query map, so keys can be specified either
as atoms or strings. Here are a few examples:
* `:search` - Full text search made on transactions' tags and meta data
* `%{search: "hubble deep field"}`
* `:tagged` - Filter transactions by the given tag or tags
* `%{tagged: "photos"}` - all transactions tagged with "photos"
* `%{tagged: ["space", "hubble"]}` - all transactions tagged with *both* "space" and "hubble"
* `%{tagged: "space, hubble"}` - as above, but given as a comma separated string
* `:from` - The block height from which to filter transactions by
* `%{from: 636400}` - all transactions from and including block 636400
* `:to` - The block height to which to filter transactions by
* `%{to: 636800}` - all transactions up to and including block 636800
* `%{from: 636400, to: 636800}` - all transactions in the range 636400 to 636800
* `:at` - The block height at which to filter transactions by exactly
* `%{at: 636500}` - all transactions at block 636500
* `%{at: "null"}` - all transactions without a block height (unconfirmed)
* `%{at: "!null"}` - all transactions with any block height (confirmed)
* `:order` - The attribute to sort transactions by
* `%{order: "i"}` - sort by block height in ascending order
* `%{order: "-i"}` - sort by block height in descending order
* `%{order: "created_at"}` - sort by insertion time in ascending order
* `%{order: "-created_at"}` - sort by insertion time in descending order
* `:limit` - The maximum number of transactions to return
* `:offset` - The start offset from which to return transactions (for pagination)
## Transaction state machine and miner API integration
Under the hood, Txbox is packed with a powerful state machine with automatic
miner API integration.
When creating a new transaction, you can set its state to one of the
following values.
* `"pending"` - If no state is specified, the default state is `"pending"`.
Pending transactions can be considered draft or incomplete transactions. Draft
transactions can be updated, and will not be pushed to miners unless the state
changes.
* `"queued"` - Under the `"queued"` state, a transaction will be asynchronously
pushed to the configured miner API in the background. Depending on the miner
response, the state will transition to `"pushed"` or `"failed"`.
* `"pushed"` - If the state is specified as `"pushed"`, this tells Txbox the
transaction is already accepted by miners. In the background, Txbox will poll
the configured miner API until a response confirms the transaction is in a
block.
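For example, a transaction can be created directly in the `"queued"` state so
that it is pushed to the configured miner in the background (an illustrative
sketch, reusing the txid from the examples above):
iex> Txbox.create(%{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
...> rawtx: <<...>>,
...> state: "queued"
...> })
{:ok, %Tx{state: "queued"}}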
The miner API queue and processing occurs automatically in a background
process, run under your application's supervision tree. For details refer to
`Txbox.Mapi.Queue` and `Txbox.Mapi.Processor`.
Each historic miner API response is saved and associated with the transaction. The
most recent response is always preloaded with the transaction. This allows
you to inspect any messages or errors given by miners.
iex> {:ok, tx} = Txbox.find("6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
iex> tx.status
%Txbox.Transactions.MapiResponse{
type: "push",
payload: %{
"return_result" => "failure",
"return_description" => "Not enough fees",
...
},
public_key: "<KEY>",
signature: "3045022100c8e7f9369545b89c978afc13cc19fc6dd6e1cd139d363a6b808141e2c9fccd2e02202e12f4bf91d10bf7a45191e6fe77f50d7b5351dae7e0613fecc42f61a5736af8",
verified: true
}
"""
@doc false
use Supervisor
alias Txbox.Transactions
alias Txbox.Transactions.Tx
@default_channel "txbox"
@doc """
Returns the default channel (`"txbox"`).
"""
@spec default_channel() :: String.t
def default_channel(), do: @default_channel
@doc """
Starts the Txbox process linked to the current process.
"""
@spec start_link(keyword) :: Supervisor.on_start
def start_link(options) do
Supervisor.start_link(__MODULE__, options, name: __MODULE__)
end
@impl true
def init(opts) do
children = [
{Txbox.Mapi.Queue, opts},
{Txbox.Mapi.Processor, opts}
]
Supervisor.init(children, strategy: :one_for_one)
end
@doc false
def all(query \\ %{})
def all(query) when is_map(query), do: all(@default_channel, query)
def all(channel) when is_binary(channel), do: all(channel, %{})
@doc """
Finds a list of transactions, scoped by the specified channel and/or filtered
by the map of query options.
If the channel is omitted, it defaults to `default_channel/0`. Alternatively,
the channel can be specified as `"_"` which is the TXT syntax for the global
scope.
## Query options
The accepted query options are: (keys can be atoms or strings)
* `:search` - Full text search made on transactions' tags and meta data
* `:tagged` - Filter transactions by the given tag or tags
* `:from` - The block height from which to filter transactions by
* `:to` - The block height to which to filter transactions by
* `:at` - The block height at which to filter transactions by exactly
* `:order` - The attribute to sort transactions by
* `:limit` - The maximum number of transactions to return
* `:offset` - The start offset from which to return transactions (for pagination)
* `:rawtx` - Include the full rawtx in the query response (defaults false)
## Examples
Find all transactions from the specified block height in the default channel (`"txbox"`)
iex> Txbox.all(%{from: 636400})
{:ok, [%Tx{}, ...]}
Find all transactions in the specified channel with a combination of filters
iex> Txbox.all("photos", %{from: 636400, tagged: "hubble", limit: 5})
{:ok, [%Tx{}, ...]}
Find all transactions in any channel unfiltered
iex> Txbox.all("_")
{:ok, [%Tx{}, ...]}
Make full text search against the transactions' meta data and tag names.
iex> Txbox.all(%{search: "hubble deep field"})
{:ok, [%Tx{}, ...]}
"""
@spec all(String.t, map) :: {:ok, list(Tx.t)}
def all(channel, %{} = query) do
txns = Tx
|> Transactions.channel(channel)
|> Transactions.list_tx(query)
{:ok, txns}
end
@doc false
def find(txid) when is_binary(txid),
do: find(@default_channel, txid)
@doc false
def find(txid, options) when is_binary(txid) and is_list(options),
do: find(@default_channel, txid, options)
@doc """
Finds a transaction by its txid, scoped by the specified channel.
If the channel is omitted, it defaults to `default_channel/0`. Alternatively,
the channel can be specified as `"_"` which is the TXT syntax for the global
scope.
## Options
The accepted options are:
* `:rawtx` - Include the full rawtx in the query response (defaults false)
## Examples
Find within the default channel (`"txbox"`)
iex> Txbox.find("6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
{:ok, %Tx{}}
Find within the specified channel
iex> Txbox.find("photos", "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
{:ok, %Tx{}}
Find within the global scope
iex> Txbox.find("_", "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110")
{:ok, %Tx{}}
"""
@spec find(String.t, String.t, keyword) :: {:ok, Tx.t} | {:error, :not_found}
def find(channel, txid, options \\ [])
when is_binary(channel)
and is_binary(txid)
do
rawtx = Keyword.get(options, :rawtx)
tx = Tx
|> Transactions.channel(channel)
|> Transactions.query(%{rawtx: rawtx})
|> Transactions.get_tx(txid)
case tx do
%Tx{} = tx ->
{:ok, tx}
nil ->
{:error, :not_found}
end
end
@doc """
Adds the given transaction parameters into Txbox, within the specified channel.
If the channel is omitted, it defaults to `default_channel/0`.
## Examples
Add a transaction txid within the default channel (`"txbox"`).
iex> Txbox.create(%{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110"
...> })
{:ok, %Tx{}}
Add a transaction with associated meta data, within a specified channel.
iex> Txbox.create("photos", %{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
...> tags: ["hubble", "universe"],
...> meta: %{
...> title: "Hubble Ultra-Deep Field"
...> },
...> data: %{
...> bitfs: "https://x.bitfs.network/6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110.out.0.3"
...> }
...> })
{:ok, %Tx{}}
"""
@spec create(String.t, map) :: {:ok, Tx.t} | {:error, Ecto.Changeset.t}
def create(channel \\ @default_channel, %{} = attrs) do
attrs = Map.put(attrs, :channel, channel)
case Transactions.create_tx(attrs) do
{:ok, %Tx{} = tx} ->
mapi_queue_push(tx)
{:ok, tx}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Updates the given transaction with the updated parameters.
It is only possible to update `"pending"` state transactions, otherwise the
function will return an error.
## Examples
iex> {:ok, tx} = Txbox.create(%{
...> txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
...> meta: %{title: "Hubble Ultra-Deep Field"}
...> })
iex>
iex> Txbox.update(tx, %{
...> tags: ["hubble", "universe"]
...> })
{:ok, %Tx{
txid: "6dfccf46359e033053ab1975c1e008ddc98560f591e8ed1c8bd051050992c110",
tags: ["hubble", "universe"],
meta: %{
title: "Hubble Ultra-Deep Field"
}
}}
"""
@spec update(Tx.t, map) :: {:ok, Tx.t} | {:error, Ecto.Changeset.t}
def update(%Tx{} = tx, %{} = attrs) do
case Transactions.update_tx(tx, attrs) do
{:ok, %Tx{} = tx} ->
mapi_queue_push(tx)
{:ok, tx}
{:error, reason} ->
{:error, reason}
end
end
# Enqueues the transaction for mAPI processing if it has the correct state
defp mapi_queue_push(%Tx{state: state} = tx)
when state == "queued"
or state == "pushed",
do: Txbox.Mapi.Queue.push(tx)
defp mapi_queue_push(%Tx{}), do: false
end
| lib/txbox.ex | 0.895643 | 0.868882 | txbox.ex | starcoder |
defmodule Ockam.Hub.Service.Discovery do
@moduledoc """
Discovery service storing information about other services
Options:
storage: storage module to use, default is `Ockam.Hub.Service.Discovery.Storage.Memory`
storage_options: options to call storage.init/1 with
"""
use Ockam.Worker
alias Ockam.Bare.Extended, as: BareExtended
alias Ockam.Hub.Service.Discovery.ServiceInfo
alias Ockam.Message
alias Ockam.Router
require Logger
@impl true
def setup(options, state) do
storage = Keyword.get(options, :storage, Ockam.Hub.Service.Discovery.Storage.Memory)
storage_options = Keyword.get(options, :storage_options, [])
{:ok, Map.put(state, :storage, {storage, storage.init(storage_options)})}
end
@impl true
def handle_message(message, state) do
result =
case parse_request(message) do
:list ->
list(state)
{:get, id} ->
get(id, state)
{:register, id, route, metadata} ->
## Don't reply to register request
## TODO: register API with replies
case register(id, route, metadata, state) do
{:ok, state} ->
{:noreply, state}
other ->
other
end
other ->
Logger.warn(
"Unable to parse message payload: #{inspect(message)} reason: #{inspect(other)}"
)
{:noreply, state}
end
reply(result, message)
end
def with_storage(state, fun) do
{storage_mod, storage_state} = Map.get(state, :storage)
{result, new_storage_state} = fun.(storage_mod, storage_state)
{result, Map.put(state, :storage, {storage_mod, new_storage_state})}
end
def list(state) do
with_storage(state, fn storage_mod, storage_state ->
storage_mod.list(storage_state)
end)
end
def get(id, state) do
with_storage(state, fn storage_mod, storage_state ->
storage_mod.get(id, storage_state)
end)
end
def register(id, route, metadata, state) do
with_storage(state, fn storage_mod, storage_state ->
storage_mod.register(id, route, metadata, storage_state)
end)
end
def parse_request(message) do
payload = Message.payload(message)
case payload do
<<0>> <> request_v0 ->
## TODO: better way to encode request data??
case BareExtended.decode(request_v0, request_schema()) do
{:ok, {:list, ""}} ->
:list
{:ok, {:get, id}} ->
{:get, id}
{:ok, {:register, %{id: id, metadata: metadata}}} ->
## Using message return route as a route in register request.
{:register, id, Message.return_route(message), metadata}
other ->
other
end
other ->
{:error, {:invalid_request_version, other}}
end
end
def reply({:noreply, state}, _message) do
{:ok, state}
end
def reply({reply, state}, message) do
Router.route(Message.reply(message, state.address, format_reply(reply)))
{:ok, state}
end
def format_reply(reply) do
## TODO: maybe use better distinction between results (request id/function?)
formatted =
case reply do
{:ok, service_info} ->
encode_service_info(service_info)
[] ->
encode_service_infos([])
[%ServiceInfo{} | _] = list ->
encode_service_infos(list)
:ok ->
## TODO: meaningful response for registration
""
{:error, _reason} ->
## TODO: error encoding
""
end
<<0>> <> formatted
end
## BARE schemas
def request_schema() do
[
list: {:data, 0},
get: :string,
register: {:struct, [id: :string, metadata: {:map, :string, :data}]}
]
end
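## Illustrative request construction, mirroring `parse_request/1`: payloads are
## a version byte followed by a BARE-encoded union of the schema above. E.g. a
## "get" request for service "echo" would be built roughly as
##   <<0>> <> BareExtended.encode({:get, "echo"}, request_schema())
## (a sketch; assumes `BareExtended` exposes an `encode/2` counterpart to the
## `decode/2` used above).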
## To be used with this schema, routes should be normalized to (type, value) maps
## TODO: improve encode/decode logic to work with other address formats
def service_info_schema() do
{:struct,
[
id: :string,
route: Ockam.Wire.Binary.V2.bare_spec(:route),
metadata: {:map, :string, :data}
]}
end
## TODO: come up with better API for encoding/decoding of routes
def encode_service_info(service_info) do
service_info = normalize_service_info(service_info)
:bare.encode(service_info, service_info_schema())
end
def encode_service_infos(service_infos) do
service_infos =
Enum.map(service_infos, fn service_info -> normalize_service_info(service_info) end)
:bare.encode(service_infos, {:array, service_info_schema()})
end
def normalize_service_info(%{route: route} = service_info) do
normalized_route = Enum.map(route, fn address -> Ockam.Address.normalize(address) end)
Map.put(service_info, :route, normalized_route)
end
end
| implementations/elixir/ockam/ockam_hub/lib/hub/service/discovery.ex | 0.630685 | 0.41653 | discovery.ex | starcoder |
defmodule MerklePatriciaTree.Trie.Node do
@moduledoc """
This module encodes and decodes nodes from a
trie encoding back into RLP form. We effectively implement
`c(I, i)` from the Yellow Paper.
TODO: Add richer set of tests, esp. in re: storage and branch values.
"""
alias MerklePatriciaTree.Trie
alias MerklePatriciaTree.Trie.Storage
alias MerklePatriciaTree.DB
alias MerklePatriciaTree.Utils
# Assumed module path for the hex-prefix encoding used below (HexPrefix.encode/decode).
alias MerklePatriciaTree.HexPrefix
@type trie_node ::
:empty
| {:leaf, [integer()], binary()}
| {:ext, [integer()], binary()}
| {:branch, [binary()]}
@doc """
Given a node, this function will encode the node
and put the value to storage (for nodes that are
greater than 32 bytes encoded). This implements
`c(I, i)`, Eq.(179) of the Yellow Paper.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db())
iex> MerklePatriciaTree.Trie.Node.encode_node(:empty, trie)
<<128>>
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db())
iex> encoded_node = MerklePatriciaTree.Trie.Node.encode_node({:leaf, [5,6,7], "ok"}, trie)
iex> ExRLP.decode(encoded_node)
["5g", "ok"]
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db())
iex> encoded_node = MerklePatriciaTree.Trie.Node.encode_node({:branch, [<<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>]}, trie)
iex> ExRLP.decode(encoded_node)
["", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", ""]
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db())
iex> MerklePatriciaTree.Trie.Node.encode_node({:ext, [1, 2, 3], <<>>}, trie)
<<31, 82, 144, 227, 4, 20, 0, 200, 58, 146, 224, 225, 151, 109, 242, 82, 125, 152, 60, 185, 143, 246, 78, 21, 182, 104, 139, 99, 191, 188, 107, 140>>
"""
@spec encode_node(trie_node, Trie.t()) :: nil | binary()
def encode_node(trie_node, trie) do
trie_node
|> encode_node_type(trie)
|> Storage.put_node(trie)
end
@spec decode_node(list(), Trie.t()) :: trie_node
def decode_node(branches, trie) when length(branches) == 17 do
{:branch,
Enum.reduce(branches, [], fn
"", acc ->
acc ++ [""]
[_, _] = elem, acc ->
acc ++ [elem]
elem, acc when is_binary(elem) and byte_size(elem) == 32 ->
{:ok, node} = DB.get(trie.db, elem)
acc ++ [ExRLP.decode(node)]
elem, acc when is_binary(elem) ->
try do
## Ensure that the branch's value is not decoded
if length(acc) != 16 do
acc ++ [ExRLP.decode(elem)]
else
acc ++ [elem]
end
rescue
_ ->
acc ++ [elem]
end
end)}
end
def decode_node([hp_k, v], trie) do
{prefix, is_leaf} = HexPrefix.decode(hp_k)
if is_leaf do
{:leaf, prefix, v}
else
if is_binary(v) and byte_size(v) == 32 do
{:ok, rlp} = DB.get(trie.db, v)
{:ext, prefix, ExRLP.decode(rlp)}
else
{:ext, prefix, v}
end
end
end
defp encode_node_type({:leaf, key, value}, _trie) do
[HexPrefix.encode({key, true}), value]
end
defp encode_node_type({:branch, branches}, trie) when length(branches) == 17 do
Enum.reduce(branches, [], fn
"", acc ->
acc ++ [""]
elem, acc when is_list(elem) ->
encoded_elem = ExRLP.encode(elem)
if byte_size(encoded_elem) < 32 do
acc ++ [encoded_elem]
else
hash = Utils.hash(encoded_elem)
DB.put!(trie.db, hash, encoded_elem)
acc ++ [hash]
end
elem, acc ->
acc ++ [elem]
end)
end
defp encode_node_type({:ext, shared_prefix, next_node}, trie) when is_list(next_node) do
encode_node_type({:ext, shared_prefix, ExRLP.encode(next_node)}, trie)
end
defp encode_node_type({:ext, shared_prefix, {:branch, next_node}}, trie) do
encode_node_type({:ext, shared_prefix, ExRLP.encode(next_node)}, trie)
end
defp encode_node_type({:ext, shared_prefix, next_node}, trie) do
if byte_size(next_node) == 32 do
[HexPrefix.encode({shared_prefix, false}), next_node]
else
hash = Utils.hash(next_node)
MerklePatriciaTree.DB.put!(trie.db, hash, next_node)
[HexPrefix.encode({shared_prefix, false}), hash]
end
end
defp encode_node_type(:empty, _trie), do: ""
@doc """
Decodes the root of a given trie, effectively
inverting the encoding from `c(I, i)` defined in
Eq.(179) of the Yellow Paper.
## Examples
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db(), <<128>>)
iex> |> MerklePatriciaTree.Trie.Node.decode_trie()
:empty
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db(), <<198, 130, 53, 103, 130, 111, 107>>)
iex> |> MerklePatriciaTree.Trie.Node.decode_trie()
{:leaf, [5,6,7], "ok"}
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db(), <<209, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128>>)
iex> |> MerklePatriciaTree.Trie.Node.decode_trie()
{:branch, [<<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>, <<>>]}
iex> MerklePatriciaTree.Trie.new(MerklePatriciaTree.DB.ETS.random_ets_db(), <<196, 130, 17, 35, 128>>)
iex> |> MerklePatriciaTree.Trie.Node.decode_trie()
{:ext, [1, 2, 3], <<>>}
"""
@spec decode_trie(Trie.t()) :: trie_node
def decode_trie(trie) do
case Storage.get_node(trie) do
nil ->
:empty
<<>> ->
:empty
:not_found ->
:empty
node ->
decode_node(node, trie)
end
end
end
| lib/trie/node.ex | 0.65202 | 0.49585 | node.ex | starcoder |
defmodule MerklePatriciaTree.Trie.Builder do
@moduledoc """
Builder is responsible for adding keys to an
existing merkle trie. To add a key, we need to
make a delta to our trie that ends up as the canonical
form of the given tree as defined in http://gavwood.com/Paper.pdf.
Note: this algorithm is non-obvious, and hence why we have a good
number of functional and invariant tests. We should add more specific
unit tests to this module.
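As a small illustration, inserting a key into an empty trie simply produces a
leaf node (the base case handled below); richer structures arise from the
merge clauses:
    put_key(:empty, [5, 6, 7], "ok", trie)
    #=> {:leaf, [5, 6, 7], "ok"}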
"""
alias MerklePatriciaTree.Trie
alias MerklePatriciaTree.Trie.Node
alias MerklePatriciaTree.ListHelper
@empty_branch <<>>
@doc """
Adds a key-value pair to a given trie.
This may radically change the structure of the trie.
"""
@spec put_key(Node.trie_node(), Trie.key(), ExRLP.t(), Trie.t()) :: Node.trie_node()
def put_key(trie_node, key, value, trie) do
trie_put_key(trie_node, key, value, trie)
end
# Merge into a leaf with identical key (overwrite)
defp trie_put_key({:leaf, old_prefix, _value}, new_prefix, new_value, _trie)
when old_prefix == new_prefix do
{:leaf, new_prefix, new_value}
end
# Merge leaves that share some prefix; this constructs an extension node followed by a branch
defp trie_put_key(
{:leaf, [old_prefix_hd | _old_prefix_tl] = old_prefix, old_value},
[new_prefix_hd | _new_prefix_tl] = new_prefix,
new_value,
trie
)
when old_prefix_hd == new_prefix_hd do
{matching_prefix, old_tl, new_tl} = ListHelper.overlap(old_prefix, new_prefix)
branch =
[{old_tl, old_value}, {new_tl, new_value}] |> build_branch(trie) |> Node.encode_node(trie)
{:ext, matching_prefix, branch}
end
# Merge into a leaf with no matches (i.e. create a branch)
defp trie_put_key({:leaf, old_prefix, old_value}, new_prefix, new_value, trie) do
build_branch([{old_prefix, old_value}, {new_prefix, new_value}], trie)
end
# Merge into an extension node that shares some prefix; this constructs an extension followed by a branch
defp trie_put_key(
{:ext, [old_prefix_hd | _old_prefix_tl] = old_prefix, old_value},
[new_prefix_hd | _new_prefix_tl] = new_prefix,
new_value,
trie
)
when old_prefix_hd == new_prefix_hd do
{matching_prefix, old_tl, new_tl} = ListHelper.overlap(old_prefix, new_prefix)
# TODO: Simplify logic?
if old_tl == [] do
# We are merging directly into an ext node (frustrating!)
# Since ext nodes must be followed by branches, let's just merge
# the new value into the branch
old_trie = old_value |> Trie.into(trie) |> Node.decode_trie()
new_encoded_trie = old_trie |> put_key(new_tl, new_value, trie) |> Node.encode_node(trie)
{:ext, matching_prefix, new_encoded_trie}
else
# TODO: Handle when we need to add an extension after this
# TODO: Standardize with below
first =
case old_tl do
# [] -> {16, {:encoded, old_value}} # TODO: Is this right?
[h | []] ->
{h, {:encoded, old_value}}
[h | t] ->
ext_encoded = {:ext, t, old_value} |> Node.encode_node(trie)
{h, {:encoded, ext_encoded}}
end
branch = [first, {new_tl, new_value}] |> build_branch(trie) |> Node.encode_node(trie)
{:ext, matching_prefix, branch}
end
end
# Merge into a ext with no matches (i.e. create a branch)
defp trie_put_key({:ext, old_prefix, old_value}, new_prefix, new_value, trie) do
# TODO: Standardize with above
first =
case old_prefix do
# [] -> {16, {:encoded, old_value}} # TODO: Is this right?
[h | []] ->
{h, {:encoded, old_value}}
[h | t] ->
ext_encoded = {:ext, t, old_value} |> Node.encode_node(trie)
{h, {:encoded, ext_encoded}}
end
build_branch([first, {new_prefix, new_value}], trie)
end
# Merge into a branch with empty prefix to store branch value
defp trie_put_key({:branch, branches}, [], value, _trie) when length(branches) == 17 do
{:branch, List.replace_at(branches, 16, value)}
end
# Merge down a branch node (recursively)
defp trie_put_key({:branch, branches}, [prefix_hd | prefix_tl], value, trie) do
{:branch,
List.update_at(branches, prefix_hd, fn branch ->
branch_node = branch |> Trie.into(trie) |> Node.decode_trie()
# Recursively merge the key into the selected nibble's sub-trie, then re-encode it
branch_node |> put_key(prefix_tl, value, trie) |> Node.encode_node(trie)
end)}
end
# Merge into empty to create a leaf
defp trie_put_key(:empty, prefix, value, _trie) do
{:leaf, prefix, value}
end
# Builds a branch node with starter values
defp build_branch(branch_options, trie) do
base = {:branch, for(_ <- 0..15, do: @empty_branch) ++ [<<>>]}
Enum.reduce(branch_options, base, fn
{prefix, {:encoded, value}}, {:branch, branches} ->
{:branch, List.replace_at(branches, prefix, value)}
{prefix, value}, acc ->
put_key(acc, prefix, value, trie)
end)
end
end
| lib/trie/builder.ex | 0.652906 | 0.577555 | builder.ex | starcoder |
elixir_doc = """
Top level module providing convenience access to needed functions as well
as the very high level `Benchee.run` API.
Intended Elixir interface.
"""
erlang_doc = """
High-Level interface for more convenient usage from Erlang. Same as `Benchee`.
"""
for {module, moduledoc} <- [{Benchee, elixir_doc}, {:benchee, erlang_doc}] do
defmodule module do
@moduledoc moduledoc
alias Benchee.Formatter
@doc """
Run benchmark jobs defined by a map and optionally provide configuration
options.
Benchmarks are defined as a map where the keys are a name for the given
function and the values are the functions to benchmark. Users can configure
the run by passing a keyword list as the second argument. For more
information on configuration see `Benchee.Configuration.init/1`.
## Examples
Benchee.run(
%{
"My Benchmark" => fn -> 1 + 1 end,
"My other benchmark" => fn -> [1] ++ [1] end
},
warmup: 2,
time: 3
)
"""
@spec run(map, keyword) :: any
def run(jobs, config \\ []) when is_list(config) do
config
|> Benchee.init()
|> Benchee.system()
|> add_benchmarking_jobs(jobs)
|> Benchee.collect()
|> Benchee.statistics()
|> Benchee.load()
|> Benchee.relative_statistics()
|> Formatter.output()
end
defp add_benchmarking_jobs(suite, jobs) do
Enum.reduce(jobs, suite, fn {key, function}, suite_acc ->
Benchee.benchmark(suite_acc, key, function)
end)
end
@doc """
See `Benchee.Configuration.init/1`
"""
defdelegate init(), to: Benchee.Configuration
@doc """
See `Benchee.Configuration.init/1`
"""
defdelegate init(config), to: Benchee.Configuration
@doc """
See `Benchee.System.system/1`
"""
defdelegate system(suite), to: Benchee.System
@doc """
See `Benchee.Benchmark.benchmark/3`
"""
defdelegate benchmark(suite, name, function), to: Benchee.Benchmark
@doc false
defdelegate benchmark(suite, name, function, printer), to: Benchee.Benchmark
@doc """
See `Benchee.Benchmark.collect/1`
"""
defdelegate collect(suite), to: Benchee.Benchmark
@doc false
defdelegate collect(suite, printer), to: Benchee.Benchmark
@doc """
See `Benchee.Statistics.statistics/1`
"""
defdelegate statistics(suite), to: Benchee.Statistics
@doc """
See `Benchee.RelativeStatistics.relative_statistics/1`
"""
defdelegate relative_statistics(suite), to: Benchee.RelativeStatistics
@doc """
See `Benchee.ScenarioLoader.load/1`
"""
defdelegate load(suite), to: Benchee.ScenarioLoader
end
end
| lib/benchee.ex | 0.899734 | 0.467696 | benchee.ex | starcoder |
defmodule BSV.PrivKey do
@moduledoc """
A PrivKey is a data structure representing a Bitcoin private key.
Internally, a private key is a secret 256-bit integer within the range of the
ECDSA `secp256k1` parameters. Each private key corresponds to a public key
which is a coordinate on the `secp256k1` curve.
"""
import BSV.Util, only: [decode: 2, encode: 2]
defstruct d: nil, compressed: true
@typedoc "Private key struct"
@type t() :: %__MODULE__{
d: privkey_bin(),
compressed: boolean()
}
@typedoc "Private key 256-bit binary"
@type privkey_bin() :: <<_::256>>
@typedoc """
Wallet Import Format private key
WIF encoding is a common way to represent private keys in Bitcoin. WIF
encoded keys are shorter and include built-in error checking and a type byte.
"""
@type privkey_wif() :: String.t()
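# Illustrative WIF roundtrip (a sketch; assumes the default network config):
#
#   privkey = BSV.PrivKey.new()
#   privkey |> BSV.PrivKey.to_wif() |> BSV.PrivKey.from_wif!() == privkey
#   #=> true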
@version_bytes %{
main: <<0x80>>,
test: <<0xEF>>
}
@doc """
Generates and returns a new `t:BSV.PrivKey.t/0`.
## Options
The accepted options are:
* `:compressed` - Denotes whether the corresponding `t:BSV.PubKey.t/0` is compressed or not. Defaults to `true`.
"""
@spec new(keyword()) :: t()
def new(opts \\ []) do
{_pubkey, privkey} = :crypto.generate_key(:ecdh, :secp256k1)
from_binary!(privkey, opts)
end
@doc """
Parses the given binary into a `t:BSV.PrivKey.t/0`.
Returns the result in an `:ok` / `:error` tuple pair.
## Options
The accepted options are:
* `:compressed` - Denotes whether the corresponding `t:BSV.PubKey.t/0` is compressed or not. Defaults to `true`.
* `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
## Examples
iex> PrivKey.from_binary("<KEY>", encoding: :hex)
{:ok, %PrivKey{
d: <<60, 255, 4, 99, 48, 136, 98, 46, 69, 153, 220, 46, 191, 132, 63, 130, 206, 243, 70, 59, 145, 13, 52, 167, 82, 161, 54, 34, 171, 174, 55, 155>>
}}
"""
@spec from_binary(binary(), keyword()) :: {:ok, t()} | {:error, term()}
def from_binary(privkey, opts \\ []) when is_binary(privkey) do
encoding = Keyword.get(opts, :encoding)
compressed = Keyword.get(opts, :compressed, true)
case decode(privkey, encoding) do
{:ok, <<d::binary-32>>} ->
{:ok, struct(__MODULE__, d: d, compressed: compressed)}
{:ok, d} ->
{:error, {:invalid_privkey, byte_size(d)}}
{:error, error} ->
{:error, error}
end
end
@doc """
Parses the given binary into a `t:BSV.PrivKey.t/0`.
As `from_binary/2` but returns the result or raises an exception.
"""
@spec from_binary!(binary(), keyword()) :: t()
def from_binary!(privkey, opts \\ []) when is_binary(privkey) do
case from_binary(privkey, opts) do
{:ok, privkey} ->
privkey
{:error, error} ->
raise BSV.DecodeError, error
end
end
@doc """
Decodes the given `t:BSV.PrivKey.privkey_wif/0` into a `t:BSV.PrivKey.t/0`.
Returns the result in an `:ok` / `:error` tuple pair.
## Examples
iex> PrivKey.from_wif("<KEY>")
{:ok, %PrivKey{
d: <<60, 255, 4, 99, 48, 136, 98, 46, 69, 153, 220, 46, 191, 132, 63, 130, 206, 243, 70, 59, 145, 13, 52, 167, 82, 161, 54, 34, 171, 174, 55, 155>>
}}
"""
@spec from_wif(privkey_wif()) :: {:ok, t()} | {:error, term()}
def from_wif(wif) when is_binary(wif) do
version_byte = @version_bytes[BSV.network()]
case B58.decode58_check(wif) do
{:ok, {<<d::binary-32, 1>>, ^version_byte}} ->
{:ok, struct(__MODULE__, d: d, compressed: true)}
{:ok, {<<d::binary-32>>, ^version_byte}} ->
{:ok, struct(__MODULE__, d: d, compressed: false)}
{:ok, {<<d::binary>>, version_byte}} when byte_size(d) in [32,33] ->
{:error, {:invalid_base58_check, version_byte, BSV.network()}}
_error ->
{:error, :invalid_wif}
end
end
@doc """
Decodes the given `t:BSV.PrivKey.privkey_wif/0` into a `t:BSV.PrivKey.t/0`.
As `from_wif/1` but returns the result or raises an exception.
"""
@spec from_wif!(privkey_wif()) :: t()
def from_wif!(wif) when is_binary(wif) do
case from_wif(wif) do
{:ok, privkey} ->
privkey
{:error, error} ->
raise BSV.DecodeError, error
end
end
@doc """
Serialises the given `t:BSV.PrivKey.t/0` into a binary.
## Options
The accepted options are:
* `:encoding` - Optionally encode the binary with either the `:base64` or `:hex` encoding scheme.
## Examples
iex> PrivKey.to_binary(@privkey, encoding: :hex)
"<KEY>"
"""
@spec to_binary(t(), keyword()) :: binary()
def to_binary(%__MODULE__{d: d}, opts \\ []) do
encoding = Keyword.get(opts, :encoding)
encode(d, encoding)
end
@doc """
Encodes the given `t:BSV.PrivKey.t/0` as a `t:BSV.PrivKey.privkey_wif/0`.
## Examples
iex> PrivKey.to_wif(@privkey)
"<KEY>"
"""
@spec to_wif(t()) :: privkey_wif()
def to_wif(%__MODULE__{d: d, compressed: compressed}) do
version_byte = @version_bytes[BSV.network()]
privkey_with_suffix = case compressed do
true -> <<d::binary, 0x01>>
false -> d
end
B58.encode58_check!(privkey_with_suffix, version_byte)
end
end
| lib/bsv/priv_key.ex | 0.879587 | 0.602793 | priv_key.ex | starcoder |
defmodule Dotenvy.Transformer do
@moduledoc """
This module provides functionality for converting string values to specific Elixir data types.
These conversions were designed to operate on system environment variables, which
_always_ store string binaries.
"""
defmodule Error do
@moduledoc false
defexception message: "non-empty value required"
end
@doc """
Converts strings into Elixir data types with support for nil-able values. Raises on error.
Each type determines how to interpret the incoming string, e.g. when the `type`
is `:integer`, an empty string is considered a `0`; when `:integer?` is the `type`,
an empty string is converted to `nil`.
Remember:
- Use a `?` suffix when an empty string should be considered `nil` (a.k.a. a "nullable" value).
- Use a `!` suffix when an empty string is not allowed. Use this when values are required.
## Types
The following types are supported:
- `:atom` - converts to an atom. An empty string will be the atom `:""` (!).
- `:atom?` - converts to an atom. An empty string will be considered `nil`
- `:atom!` - converts to an atom. An empty string will raise.
- `:boolean` - "false", "0", or an empty string "" will be considered boolean `false`. Any other non-empty value is considered `true`.
- `:boolean?` - as above, except an empty string will be considered `nil`
- `:boolean!` - as above, except an empty string will raise.
- `:charlist` - converts string to charlist.
- `:charlist?` - converts string to charlist. Empty string will be considered `nil`.
- `:charlist!` - as above, but an empty string will raise.
- `:integer` - converts a string to an integer. An empty string will be considered `0`.
- `:integer?` - as above, but an empty string will be considered `nil`.
- `:integer!` - as above, but an empty string will raise.
- `:float` - converts a string to a float. An empty string will be considered `0`.
- `:float?` - as above, but an empty string will be considered `nil`.
- `:float!` - as above, but an empty string will raise.
- `:existing_atom` - converts into an existing atom. Raises error if the atom does not exist.
- `:existing_atom?` - as above, but an empty string will be considered `nil`.
- `:existing_atom!` - as above, but an empty string will raise.
- `:module` - converts a string into an Elixir module name. Raises on error.
- `:module?` - as above, but an empty string will be considered `nil`.
- `:module!` - as above, but an empty string will raise.
- `:string` - no conversion (default)
- `:string?` - empty strings will be considered `nil`.
- `:string!` - as above, but an empty string will raise.
- custom function - see below.
## Custom Callback function
When you require more control over the transformation of your value than is possible
with the types provided, you can provide an arity 1 function in place of the type.
## Examples
iex> to!("debug", :atom)
:debug
iex> to!("", :boolean)
false
iex> to!("", :boolean?)
nil
iex> to!("5432", :integer)
5432
iex> to!("foo", fn val -> val <> "bar" end)
"foobar"
"""
@spec to!(str :: binary(), type :: atom) :: any()
def to!(str, :atom) when is_binary(str) do
str
|> String.trim_leading(":")
|> String.to_atom()
end
def to!("", :atom?), do: nil
def to!(str, :atom?), do: to!(str, :atom)
def to!("", :atom!), do: raise(Error)
def to!(str, :atom!), do: to!(str, :atom)
def to!(str, :boolean) when is_binary(str) do
str
|> String.downcase()
|> case do
"false" -> false
"0" -> false
"" -> false
_ -> true
end
end
def to!("", :boolean?), do: nil
def to!(str, :boolean?), do: to!(str, :boolean)
def to!("", :boolean!), do: raise(Error)
def to!(str, :boolean!), do: to!(str, :boolean)
def to!(str, :charlist) when is_binary(str), do: to_charlist(str)
def to!("", :charlist?), do: nil
def to!(str, :charlist?), do: to!(str, :charlist)
def to!("", :charlist!), do: raise(Error)
def to!(str, :charlist!), do: to!(str, :charlist)
def to!(str, :existing_atom) when is_binary(str) do
str
|> String.trim_leading(":")
|> String.to_existing_atom()
rescue
_ -> reraise(Error, "#{inspect(str)}: not an existing atom", __STACKTRACE__)
end
def to!("", :existing_atom?), do: nil
def to!(str, :existing_atom?), do: to!(str, :existing_atom)
def to!("", :existing_atom!), do: raise(Error)
def to!(str, :existing_atom!), do: to!(str, :existing_atom)
def to!("", :float), do: 0
def to!(str, :float) when is_binary(str) do
case Float.parse(str) do
      :error ->
        raise(Error, "#{inspect(str)}: unparsable as a float")
{value, _} ->
value
end
end
def to!("", :float?), do: nil
def to!(str, :float?), do: to!(str, :float)
def to!("", :float!), do: raise(Error)
def to!(str, :float!), do: to!(str, :float)
def to!("", :integer), do: 0
def to!(str, :integer) when is_binary(str) do
case Integer.parse(str) do
      :error ->
        raise(Error, "#{inspect(str)}: unparsable as an integer")
{value, _} ->
value
end
end
def to!("", :integer?), do: nil
def to!(str, :integer?), do: to!(str, :integer)
def to!("", :integer!), do: raise(Error)
def to!(str, :integer!), do: to!(str, :integer)
def to!(str, :module) when is_binary(str) do
"Elixir.#{str}"
|> String.to_existing_atom()
end
def to!("", :module?), do: nil
def to!(str, :module?), do: to!(str, :module)
def to!("", :module!), do: raise(Error)
def to!(str, :module!), do: to!(str, :module)
def to!(str, :string) when is_binary(str), do: str
def to!("", :string?), do: nil
def to!(str, :string?) when is_binary(str), do: str
def to!("", :string!), do: raise(Error)
def to!(str, :string!) when is_binary(str), do: str
def to!(str, callback) when is_function(callback, 1) do
callback.(str)
end
def to!(str, _) when not is_binary(str), do: raise(Error, "Input must be a string.")
def to!(_, type), do: raise(Error, "Unknown type #{inspect(type)}")
end
|
lib/dotenvy/transformer.ex
| 0.918959
| 0.836488
|
transformer.ex
|
starcoder
|
defmodule Noizu.Scaffolding.EntityBehaviour do
@moduledoc """
This Behaviour provides some callbacks needed for the Noizu.ERP (EntityReferenceProtocol) to work smoothly.
Note the following naming conventions (where Path.To.Entity is the same path in each following case)
- Entities MyApp.(Path.To.Entity).MyFooEntity
- Tables MyApp.MyDatabase.(Path.To.Entity).MyFooTable
- Repos MyApp.(Path.To.Entity).MyFooRepo
If the above conventions are not used a framework user must provide the appropriate `mnesia_table`, and `repo_module` `use` options.
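
  A minimal `use` sketch following those conventions (module and option names
  here are illustrative, not taken from a real project):

  ```
  defmodule MyApp.Users.UserEntity do
    use Noizu.Scaffolding.EntityBehaviour,
      sref_module: "user",
      mnesia_table: MyApp.MyDatabase.Users.UserTable,
      repo_module: MyApp.Users.UserRepo
  end
  ```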
"""
alias Noizu.ElixirCore.CallingContext
#-----------------------------------------------------------------------------
# aliases, imports, uses,
#-----------------------------------------------------------------------------
require Logger
#-----------------------------------------------------------------------------
# Behaviour definition and types.
#-----------------------------------------------------------------------------
@type nmid :: integer | atom | String.t | tuple
@type entity_obj :: any
@type entity_record :: any
@type entity_tuple_reference :: {:ref, module, nmid}
@type entity_string_reference :: String.t
@type entity_reference :: entity_obj | entity_record | entity_tuple_reference | entity_string_reference
@type details :: any
@type error :: {:error, details}
@type options :: Map.t | nil
@doc """
Return identifier of ref, sref, entity or record
"""
@callback id(entity_reference) :: any | error
@doc """
Returns appropriate {:ref|:ext_ref, module, identifier} reference tuple
"""
@callback ref(entity_reference) :: entity_tuple_reference | error
@doc """
Returns appropriate string encoded ref. E.g. ref.user.1234
"""
@callback sref(entity_reference) :: entity_string_reference | error
@doc """
Returns entity, given an identifier, ref tuple, ref string or other known identifier type.
Where an entity is a EntityBehaviour implementing struct.
"""
@callback entity(entity_reference, options) :: entity_obj | error
@doc """
Returns entity, given an identifier, ref tuple, ref string or other known identifier type. Wrapping call in transaction if required.
Where an entity is a EntityBehaviour implementing struct.
"""
@callback entity!(entity_reference, options) :: entity_obj | error
@doc """
Returns record, given an identifier, ref tuple, ref string or other known identifier type.
Where a record is the raw mnesia table entry, as opposed to a EntityBehaviour based struct object.
"""
@callback record(entity_reference, options) :: entity_record | error
@doc """
Returns record, given an identifier, ref tuple, ref string or other known identifier type. Wrapping call in transaction if required.
Where a record is the raw mnesia table entry, as opposed to a EntityBehaviour based struct object.
"""
@callback record!(entity_reference, options) :: entity_record | error
@callback has_permission(entity_reference, any, any, options) :: boolean | error
@callback has_permission!(entity_reference, any, any, options) :: boolean | error
@doc """
Converts entity into record format. Aka extracts any fields used for indexing with the expected database table looking something like
```
%Table{
identifier: entity.identifier,
...
any_indexable_fields: entity.indexable_field,
...
entity: entity
}
```
  The default implementation assumes the table structure is simply `%Table{identifier: entity.identifier, entity: entity}`, therefore you will need to
  override this implementation if you have any indexable fields. Future versions of the entity behaviour will accept an indexable field option
that will insert expected fields and (if indicated) do simple type casting such as transforming DateTime.t fields into utc time stamps or
`{time_zone, year, month, day, hour, minute, second}` tuples for efficient range querying.
"""
@callback as_record(entity_obj) :: entity_record | error
@doc """
  Returns the string used for preparing sref format strings. E.g. a `User` struct might use the string ``"user"`` as its sref_module, resulting in
sref strings like `ref.user.1234`.
"""
@callback sref_module() :: String.t
@doc """
Cast from json to struct.
"""
@callback from_json(Map.t, CallingContext.t) :: any
@doc """
  Get the entity's repo module.
"""
@callback repo() :: atom
#-----------------------------------------------------------------------------
# Defines
#-----------------------------------------------------------------------------
  @methods [:id, :ref, :sref, :entity, :entity!, :record, :record!, :erp_imp, :as_record, :sref_module, :from_json, :repo, :shallow, :miss_cb]
#-----------------------------------------------------------------------------
# Default Implementations
#-----------------------------------------------------------------------------
defmodule DefaultImplementation do
@callback ref_implementation(table :: Module, sref_prefix :: String.t) :: Macro.t
@callback miss_cb_implementation() :: Macro.t
@callback sref_implementation(table :: Module, sref_prefix :: String.t) :: Macro.t
@callback entity_implementation(table :: Module, repo :: Module) :: Macro.t
@callback entity_txn_implementation(table :: Module, repo :: Module) :: Macro.t
@callback record_implementation(table :: Module, repo :: Module) :: Macro.t
@callback record_txn_implementation(table :: Module, repo :: Module) :: Macro.t
@callback as_record_implementation(Module, options :: nil | Map.t) :: any
@callback expand_table(Module, Module) :: Module
@callback expand_repo(Module, Module) :: Module
@doc """
Noizu.ERP Implementation
"""
@callback erp_imp(table :: Module) :: Macro.t
def id_implementation(table, sref_prefix) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
def id(nil), do: nil
def id({:ref, __MODULE__, identifier} = _ref), do: identifier
def id(%__MODULE__{} = entity), do: entity.identifier
def id(unquote(sref_prefix) <> identifier), do: __MODULE__.ref(identifier) |> __MODULE__.id()
def id(%@table{} = record), do: record.identifier
end # end quote
end
def ref_implementation(table, sref_prefix) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
def ref(nil), do: nil
def ref({:ref, __MODULE__, _identifier} = ref), do: ref
def ref(identifier) when is_integer(identifier), do: {:ref, __MODULE__, identifier}
def ref(unquote(sref_prefix) <> identifier = _sref), do: ref(identifier)
def ref(identifier) when is_bitstring(identifier), do: {:ref, __MODULE__, String.to_integer(identifier)}
def ref(identifier) when is_atom(identifier), do: {:ref, __MODULE__, identifier}
def ref(%__MODULE__{} = entity), do: {:ref, __MODULE__, entity.identifier}
def ref(%@table{} = record), do: {:ref, __MODULE__, record.identifier}
def ref(any), do: raise "#{__MODULE__}.ref Unsupported item #{inspect any}"
end # end quote
end # end ref_implementation
def sref_implementation(table, sref_prefix) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
def sref(nil), do: nil
def sref(identifier) when is_integer(identifier), do: "#{unquote(sref_prefix)}#{identifier}"
      def sref(unquote(sref_prefix) <> _identifier = sref), do: sref
def sref(identifier) when is_bitstring(identifier), do: ref(identifier) |> sref()
def sref(identifier) when is_atom(identifier), do: "#{unquote(sref_prefix)}#{identifier}"
def sref(%__MODULE__{} = this), do: "#{unquote(sref_prefix)}#{this.identifier}"
def sref(%@table{} = record), do: "#{unquote(sref_prefix)}#{record.identifier}"
def sref(any), do: raise "#{__MODULE__}.sref Unsupported item #{inspect any}"
end # end quote
end # end sref_implementation
def miss_cb_implementation() do
quote do
      def miss_cb(id, options \\ nil)
      def miss_cb(_id, _options), do: nil
end # end quote
end # end entity_implementation
def entity_implementation(table, repo) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
@repo unquote(__MODULE__).expand_repo(__MODULE__, unquote(repo))
def entity(item, options \\ nil)
def entity(nil, _options), do: nil
      def entity(%__MODULE__{} = this, _options), do: this
      def entity(%@table{} = record, _options), do: record.entity
def entity(identifier, options) do
@repo.get(__MODULE__.id(identifier), Noizu.ElixirCore.CallingContext.internal(), options) || __MODULE__.miss_cb(identifier, options)
end
end # end quote
end # end entity_implementation
def entity_txn_implementation(table, repo) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
@repo unquote(__MODULE__).expand_repo(__MODULE__, unquote(repo))
def entity!(item, options \\ nil)
def entity!(nil, _options), do: nil
      def entity!(%__MODULE__{} = this, _options), do: this
      def entity!(%@table{} = record, _options), do: record.entity
def entity!(identifier, options), do: @repo.get!(__MODULE__.ref(identifier) |> __MODULE__.id(), Noizu.ElixirCore.CallingContext.internal(), options) || __MODULE__.miss_cb(identifier, options)
end # end quote
end # end entity_txn_implementation
def record_implementation(table, repo) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
@repo unquote(__MODULE__).expand_repo(__MODULE__, unquote(repo))
def record(item, options \\ nil)
def record(nil, _options), do: nil
      def record(%__MODULE__{} = this, _options), do: __MODULE__.as_record(this)
      def record(%@table{} = record, _options), do: record
      def record(identifier, options), do: @repo.get(__MODULE__.ref(identifier) |> __MODULE__.id(), Noizu.ElixirCore.CallingContext.internal(), options) |> __MODULE__.as_record()
end # end quote
end # end record_implementation
def record_txn_implementation(table, repo) do
quote do
@table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
@repo unquote(__MODULE__).expand_repo(__MODULE__, unquote(repo))
def record!(item, options \\ nil)
def record!(nil, _options), do: nil
      def record!(%__MODULE__{} = this, _options), do: __MODULE__.as_record(this)
      def record!(%@table{} = record, _options), do: record
def record!(identifier, options), do: @repo.get!(__MODULE__.ref(identifier) |> __MODULE__.id(), Noizu.ElixirCore.CallingContext.internal(), options) |> __MODULE__.as_record()
end # end quote
end # end record_txn_implementation
def erp_imp(table) do
quote do
parent_module = __MODULE__
mnesia_table = unquote(__MODULE__).expand_table(parent_module, unquote(table))
defimpl Noizu.ERP, for: [__MODULE__, mnesia_table] do
@parent_module parent_module
def id(o), do: @parent_module.id(o)
def ref(o), do: @parent_module.ref(o)
def sref(o), do: @parent_module.sref(o)
def entity(o, options \\ nil), do: @parent_module.entity(o, options)
def entity!(o, options \\ nil), do: @parent_module.entity!(o, options)
def record(o, options \\ nil), do: @parent_module.record(o, options)
def record!(o, options \\ nil), do: @parent_module.record!(o, options)
end
end # end quote
end # end erp_imp
def as_record_implementation(table, options) do
quote do
@mnesia_table unquote(__MODULE__).expand_table(__MODULE__, unquote(table))
@options unquote(options)
def as_record(nil), do: nil
if @options != nil do
if Map.has_key?(@options, :additional_fields) do
def as_record(this) do
base = %@mnesia_table{identifier: this.identifier, entity: this}
List.foldl(@options[:additional_fields], base,
fn(field, acc) ->
case Map.get(this, field, :erp_imp_field_not_found) do
:erp_imp_field_not_found -> acc
%DateTime{} = v -> Map.put(acc, field, DateTime.to_unix(v))
v -> Map.put(acc, field, v)
end
end
)
end
else
def as_record(this) do
%@mnesia_table{identifier: this.identifier, entity: this}
end
end
else
def as_record(this) do
%@mnesia_table{identifier: this.identifier, entity: this}
end
end
end # end quote
end # end as_record_implementation
def expand_table(module, table) do
# Apply Schema Naming Convention if not specified
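      # e.g. MyApp.Users.UserEntity -> MyApp.Database.Users.UserTable (default config)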
if (table == :auto) do
path = Module.split(module)
default_database = Module.concat([List.first(path), "Database"])
root_table =
Application.get_env(:noizu_scaffolding, :default_database, default_database)
|> Module.split()
entity_name = path |> List.last()
table_name = String.slice(entity_name, 0..-7) <> "Table"
inner_path = Enum.slice(path, 1..-2)
Module.concat(root_table ++ inner_path ++ [table_name])
else
table
end
end #end expand_table
def expand_repo(module, repo) do
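      # Apply the same naming convention for repos when not specified,
      # e.g. MyApp.Users.UserEntity -> MyApp.Users.UserRepo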
if (repo == :auto) do
rm = Module.split(module) |> Enum.slice(0..-2) |> Module.concat
m = (Module.split(module) |> List.last())
t = String.slice(m, 0..-7) <> "Repo"
Module.concat([rm, t])
else
repo
end
end # end expand_repo
end # end defmodule
#-----------------------------------------------------------------------------
# Using Implementation
#-----------------------------------------------------------------------------
defmacro __using__(options) do
# Only include implementation for these methods.
option_arg = Keyword.get(options, :only, @methods)
only = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, Enum.member?(option_arg, method)) end)
# Don't include implementation for these methods.
option_arg = Keyword.get(options, :override, [])
override = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, Enum.member?(option_arg, method)) end)
required? = List.foldl(@methods, %{}, fn(method, acc) -> Map.put(acc, method, only[method] && !override[method]) end)
    # Repo module (entity/record implementation), Module name with "Repo" appended if :auto
repo_module = Keyword.get(options, :repo_module, :auto)
mnesia_table = Keyword.get(options, :mnesia_table, :auto)
as_record_options = Keyword.get(options, :as_record_options, Macro.escape(%{}))
# Default Implementation Provider
default_implementation = Keyword.get(options, :default_implementation, DefaultImplementation)
sm = Keyword.get(options, :sref_module, "unsupported")
sref_prefix = "ref." <> sm <> "."
quote do
import unquote(__MODULE__)
@behaviour Noizu.Scaffolding.EntityBehaviour
@expanded_repo unquote(default_implementation).expand_repo(__MODULE__, unquote(repo_module))
if unquote(required?.sref_module) do
def sref_module(), do: unquote(sm)
end
if unquote(required?.repo) do
def repo(), do: @expanded_repo
end
#-------------------------------------------------------------------------
# Default Implementation from default_implementation behaviour
#-------------------------------------------------------------------------
if unquote(required?.from_json) do
def from_json(json, context) do
@expanded_repo.from_json(json, context)
end
end
if unquote(required?.shallow) do
def shallow(identifier) do
%__MODULE__{identifier: identifier}
end
end
#unquote(Macro.expand(default_implementation, __CALLER__).prepare(mnesia_table, repo_module, sref_prefix))
if unquote(required?.id), do: unquote(Macro.expand(default_implementation, __CALLER__).id_implementation(mnesia_table, sref_prefix))
if unquote(required?.ref), do: unquote(Macro.expand(default_implementation, __CALLER__).ref_implementation(mnesia_table, sref_prefix))
if unquote(required?.sref), do: unquote(Macro.expand(default_implementation, __CALLER__).sref_implementation(mnesia_table, sref_prefix))
if unquote(required?.miss_cb), do: unquote(Macro.expand(default_implementation, __CALLER__).miss_cb_implementation())
if unquote(required?.entity), do: unquote(Macro.expand(default_implementation, __CALLER__).entity_implementation(mnesia_table, repo_module))
if unquote(required?.entity!), do: unquote(Macro.expand(default_implementation, __CALLER__).entity_txn_implementation(mnesia_table, repo_module))
if unquote(required?.record), do: unquote(Macro.expand(default_implementation, __CALLER__).record_implementation(mnesia_table, repo_module))
if unquote(required?.record!), do: unquote(Macro.expand(default_implementation, __CALLER__).record_txn_implementation(mnesia_table, repo_module))
if unquote(required?.erp_imp), do: unquote(Macro.expand(default_implementation, __CALLER__).erp_imp(mnesia_table))
if unquote(required?.as_record), do: unquote(Macro.expand(default_implementation, __CALLER__).as_record_implementation(mnesia_table, as_record_options))
@before_compile unquote(__MODULE__)
end # end quote
end #end defmacro __using__(options)
defmacro __before_compile__(_env) do
quote do
def has_permission(_ref, _permission, _context, _options), do: false
def has_permission!(_ref, _permission, _context, _options), do: false
end # end quote
end # end defmacro __before_compile__(_env)
end #end defmodule
|
lib/scaffolding/behaviours/entity_behaviour.ex
| 0.88056
| 0.712848
|
entity_behaviour.ex
|
starcoder
|
defmodule AdventOfCode.Y2020.Day18 do
@numbers 0..9 |> Enum.map(&Integer.to_string/1)
def test_data() do
"5 + (8 * 3 + 9 + 3 * 4 * 3)"
# "2 * 3 + (4 * 5)"
end
def run() do
AdventOfCode.Helpers.Data.read_from_file("2020/day18.txt")
|> Enum.map(&calculate/1)
|> Enum.sum()
end
def calculate(input) do
input
|> parse()
|> eval()
end
def parse(input) do
input
|> String.graphemes()
|> Enum.reject(fn c -> c == " " end)
|> tokenize([])
|> add_groups()
end
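  # Turns the grapheme list into tokens: {:number, n}, {:op, :add | :mult},
  # and nested {:group, tokens} for parenthesised sub-expressions.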
def tokenize(input), do: tokenize(input, [])
def tokenize([], built), do: Enum.reverse(built)
def tokenize([")" | rest], built), do: {Enum.reverse(built), rest}
def tokenize([peek | rest] = input, built) do
{a, rest} =
case peek do
"+" ->
{{:op, :add}, rest}
"*" ->
{{:op, :mult}, rest}
"(" ->
{g, r} = tokenize(rest, [])
{{:group, g}, r}
h when h in @numbers ->
read_number(input)
_ ->
{{:eeh, peek}, []}
end
tokenize(rest, [a | built])
end
def read_number(input) do
number = input |> Enum.take_while(fn x -> x in @numbers end)
nr = Enum.join(number) |> String.to_integer()
{{:number, nr}, Enum.drop_while(input, fn x -> x in @numbers end)}
end
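  # Gives `+` higher precedence than `*` by wrapping each addition in its own
  # sub-list; parenthesised groups are expanded recursively first.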
def add_groups(l), do: add_groups(l, [])
def add_groups([{:group, group} | rest], done),
do: add_groups([add_groups(group, []) | rest], done)
def add_groups([left, {:op, :add}, {:group, right} | rest], done) do
add_groups([[left, {:op, :add}, add_groups(right, [])] | rest], done)
end
def add_groups([left, {:op, :add}, right | rest], done) do
add_groups([[left, {:op, :add}, right] | rest], done)
end
def add_groups([], done), do: Enum.reverse(done)
def add_groups([h | rest], done) do
add_groups(rest, [h | done])
end
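  # Evaluates a (possibly nested) token list: `mem` accumulates the left
  # operand and each operator clause recurses on the remainder of the list.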
def eval(l), do: eval(l, 0)
def eval([], mem), do: mem
def eval([group | rest], 0) when is_list(group),
do: eval(rest, eval(group, 0))
def eval([{:number, nr} | rest], 0), do: eval(rest, nr)
def eval([{:op, :mult} | rest], mem), do: mem * eval(rest, 0)
def eval([{:op, :add} | rest], mem), do: mem + eval(rest, 0)
end
|
lib/2020/day18.ex
| 0.5144
| 0.559049
|
day18.ex
|
starcoder
|
defmodule Redix do
@moduledoc """
This module provides the main API to interface with Redis.
## Overview
`start_link/2` starts a process that connects to Redis. Each Elixir process
started with this function maps to a client TCP connection to the specified
Redis server.
The architecture is very simple: when you issue commands to Redis (via
`command/3` or `pipeline/3`), the Redix process sends the command to Redis right
away and is immediately able to send new commands. When a response arrives
from Redis, only then the Redix process replies to the caller with the
response. This pattern avoids blocking the Redix process for each request (until
a response arrives), increasing the performance of this driver.
## Reconnections
Redix tries to be as resilient as possible: it tries to recover automatically
from most network errors.
If there's a network error when sending data to Redis or if the connection to Redis
drops, Redix tries to reconnect. The first reconnection attempt will happen
after a fixed time interval; if this attempt fails, reconnections are
attempted until successful, and the time interval between reconnections is
increased exponentially. Some aspects of this behaviour can be configured; see
`start_link/2` and the "Reconnections" page in the docs for more information.
All this behaviour is implemented using the
[connection](https://github.com/fishcakez/connection) library (a dependency of
Redix).
"""
# This module is only a "wrapper" module that exposes the public API alongside
# documentation for it. The real work is done in Redix.Connection and every
# function in this module goes through Redix.Connection.pipeline/3 one way or
# another.
@type command :: [binary]
@default_timeout 5000
@doc false
def child_spec(args)
def child_spec([]) do
child_spec([[], []])
end
def child_spec([uri_or_redis_opts]) do
child_spec([uri_or_redis_opts, []])
end
def child_spec([uri_or_redis_opts, connection_opts] = args)
when (is_binary(uri_or_redis_opts) or is_list(uri_or_redis_opts)) and
is_list(connection_opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, args},
type: :worker
}
end
@doc """
Starts a connection to Redis.
This function returns `{:ok, pid}` if the Redix process is started
successfully.
The actual TCP connection to the Redis server may happen either synchronously,
before `start_link/2` returns, or asynchronously: this behaviour is decided by
the `:sync_connect` option (see below).
This function accepts two arguments: the options to connect to the Redis
server (like host, port, and so on) and the options to manage the connection
and the resiliency. The Redis options can be specified as a keyword list or as
a URI.
## Redis options
### URI
In case `uri_or_redis_opts` is a Redis URI, it must be in the form:
redis://[:password@]host[:port][/db]
Here are some examples of valid URIs:
redis://localhost
redis://:secret@localhost:6397
redis://example.com:6380/1
  Usernames before the password are ignored, so these two URIs are
equivalent:
redis://:secret@localhost
redis://myuser:secret@localhost
The only mandatory thing when using URIs is the host. All other elements
(password, port, database) are optional and their default value can be found
in the "Options" section below.
### Options
The following options can be used to specify the parameters used to connect to
Redis (instead of a URI as described above):
* `:host` - (string) the host where the Redis server is running. Defaults to
`"localhost"`.
* `:port` - (integer) the port on which the Redis server is
running. Defaults to `6379`.
* `:password` - (string) the password used to connect to Redis. Defaults to
`nil`, meaning no password is used. When this option is provided, all Redix
does is issue an `AUTH` command to Redis in order to authenticate.
* `:database` - (integer or string) the database to connect to. Defaults to
`nil`, meaning don't connect to any database (Redis connects to database
`0` by default). When this option is provided, all Redix does is issue a
`SELECT` command to Redis in order to select the given database.
## Connection options
`connection_opts` is a list of options used to manage the connection. These
are the Redix-specific options that can be used:
* `:socket_opts` - (list of options) this option specifies a list of options
that are passed to `:gen_tcp.connect/4` when connecting to the Redis
server. Some socket options (like `:active` or `:binary`) will be
overridden by Redix so that it functions properly. Defaults to `[]`.
* `:sync_connect` - (boolean) decides whether Redix should initiate the TCP
connection to the Redis server *before* or *after* returning from
`start_link/2`. This option also changes some reconnection semantics; read
the "Reconnections" page in the docs.
* `:backoff_initial` - (integer) the initial backoff time (in milliseconds),
which is the time that will be waited by the Redix process before
attempting to reconnect to Redis after a disconnection or failed first
connection. See the "Reconnections" page in the docs for more information.
* `:backoff_max` - (integer) the maximum length (in milliseconds) of the
time interval used between reconnection attempts. See the "Reconnections"
page in the docs for more information.
    * `:exit_on_disconnection` - (boolean) if `true`, the Redix process will exit
if it fails to connect or disconnects from Redis. Note that setting this
option to `true` means that the `:backoff_initial` and `:backoff_max` options
will be ignored. Defaults to `false`.
* `:log` - (keyword list) a keyword list of `{action, level}` where `level` is
the log level to use to log `action`. The possible actions and their default
values are:
* `:disconnection` (defaults to `:error`) - logged when the connection to
Redis is lost
* `:failed_connection` (defaults to `:error`) - logged when Redix can't
establish a connection to Redis
* `:reconnection` (defaults to `:info`) - logged when Redix manages to
reconnect to Redis after the connection was lost
In addition to these options, all options accepted by
`Connection.start_link/3` (and thus `GenServer.start_link/3`) are forwarded to
it. For example, a Redix connection can be registered with a name by using the
`:name` option:
Redix.start_link([], name: :redix)
Process.whereis(:redix)
#=> #PID<...>
## Examples
iex> Redix.start_link()
{:ok, #PID<...>}
iex> Redix.start_link(host: "example.com", port: 9999, password: "<PASSWORD>")
{:ok, #PID<...>}
iex> Redix.start_link([database: 3], [name: :redix_3])
{:ok, #PID<...>}
"""
@spec start_link(binary | Keyword.t(), Keyword.t()) :: GenServer.on_start()
def start_link(uri_or_redis_opts \\ [], connection_opts \\ [])
def start_link(uri, other_opts) when is_binary(uri) and is_list(other_opts) do
uri |> Redix.URI.opts_from_uri() |> start_link(other_opts)
end
def start_link(redis_opts, other_opts) do
Redix.Connection.start_link(redis_opts, other_opts)
end
@doc """
Closes the connection to the Redis server.
This function is synchronous and blocks until the given Redix connection frees
all its resources and disconnects from the Redis server. `timeout` can be
  passed to limit the amount of time allowed for the connection to exit; if it
doesn't exit in the given interval, this call exits.
## Examples
iex> Redix.stop(conn)
:ok
"""
@spec stop(GenServer.server(), timeout) :: :ok
def stop(conn, timeout \\ :infinity) do
Redix.Connection.stop(conn, timeout)
end
@doc """
Issues a pipeline of commands on the Redis server.
`commands` must be a list of commands, where each command is a list of strings
making up the command and its arguments. The commands will be sent as a single
"block" to Redis, and a list of ordered responses (one for each command) will
be returned.
The return value is `{:ok, results}` if the request is successful, `{:error,
reason}` otherwise.
Note that `{:ok, results}` is returned even if `results` contains one or more
Redis errors (`Redix.Error` structs). This is done to avoid having to walk the
list of results (a `O(n)` operation) to look for errors, leaving the
responsibility to the user. That said, errors other than Redis errors (like
network errors) always cause the return value to be `{:error, reason}`.
If `commands` is an empty list (`[]`) or any of the commands in `commands` is
an empty command (`[]`) then an `ArgumentError` exception is raised right
away.
## Options
* `:timeout` - (integer or `:infinity`) request timeout (in
milliseconds). Defaults to `#{@default_timeout}`. If the Redis server
doesn't reply within this timeout, `{:error,
%Redix.ConnectionError{reason: :timeout}}` is returned.
## Examples
iex> Redix.pipeline(conn, [["INCR", "mykey"], ["INCR", "mykey"], ["DECR", "mykey"]])
{:ok, [1, 2, 1]}
iex> Redix.pipeline(conn, [["SET", "k", "foo"], ["INCR", "k"], ["GET", "k"]])
{:ok, ["OK", %Redix.Error{message: "ERR value is not an integer or out of range"}, "foo"]}
If Redis goes down (before a reconnection happens):
iex> {:error, error} = Redix.pipeline(conn, [["SET", "mykey", "foo"], ["GET", "mykey"]])
iex> error.reason
:closed
"""
@spec pipeline(GenServer.server(), [command], Keyword.t()) ::
{:ok, [Redix.Protocol.redis_value()]} | {:error, atom}
def pipeline(conn, commands, opts \\ []) do
assert_valid_pipeline_commands(commands)
Redix.Connection.pipeline(conn, commands, opts[:timeout] || @default_timeout)
end
@doc """
Issues a pipeline of commands to the Redis server, raising if there's an error.
This function works similarly to `pipeline/3`, except:
* if there are no errors in issuing the commands (even if there are one or
more Redis errors in the results), the results are returned directly (not
wrapped in a `{:ok, results}` tuple).
* if there's a connection error then a `Redix.ConnectionError` exception is raised.
For more information on why nothing is raised if there are one or more Redis
errors (`Redix.Error` structs) in the list of results, look at the
documentation for `pipeline/3`.
This function accepts the same options as `pipeline/3`.
## Options
* `:timeout` - (integer or `:infinity`) request timeout (in
milliseconds). Defaults to `#{@default_timeout}`. If the Redis server
doesn't reply within this timeout, `{:error,
%Redix.ConnectionError{reason: :timeout}}` is returned.
## Examples
iex> Redix.pipeline!(conn, [["INCR", "mykey"], ["INCR", "mykey"], ["DECR", "mykey"]])
[1, 2, 1]
iex> Redix.pipeline!(conn, [["SET", "k", "foo"], ["INCR", "k"], ["GET", "k"]])
["OK", %Redix.Error{message: "ERR value is not an integer or out of range"}, "foo"]
If Redis goes down (before a reconnection happens):
iex> Redix.pipeline!(conn, [["SET", "mykey", "foo"], ["GET", "mykey"]])
** (Redix.ConnectionError) :closed
"""
@spec pipeline!(GenServer.server(), [command], Keyword.t()) ::
[Redix.Protocol.redis_value()] | no_return
def pipeline!(conn, commands, opts \\ []) do
case pipeline(conn, commands, opts) do
{:ok, resp} ->
resp
{:error, error} ->
raise error
end
end
@doc """
Issues a command on the Redis server.
This function sends `command` to the Redis server and returns the response
  returned by Redis. `conn` must be a Redix connection (a pid or registered name). `command` must
be a list of strings making up the Redis command and its arguments.
The return value is `{:ok, response}` if the request is successful and the
response is not a Redis error. `{:error, reason}` is returned in case there's
an error in the request (such as losing the connection to Redis in between the
request). If Redis returns an error (such as a type error), a `Redix.Error`
exception is raised; the reason for this is that these errors are semantic
errors that most of the times won't go away by themselves over time and users
of Redix should be notified of them as soon as possible. Connection errors,
instead, are often temporary errors that will go away when the connection is
back.
If the given command is an empty command (`[]`), an `ArgumentError`
exception is raised.
## Options
* `:timeout` - (integer or `:infinity`) request timeout (in
milliseconds). Defaults to `#{@default_timeout}`. If the Redis server
doesn't reply within this timeout, `{:error,
%Redix.ConnectionError{reason: :timeout}}` is returned.
## Examples
iex> Redix.command(conn, ["SET", "mykey", "foo"])
{:ok, "OK"}
iex> Redix.command(conn, ["GET", "mykey"])
{:ok, "foo"}
iex> Redix.command(conn, ["INCR", "mykey"])
{:error, "ERR value is not an integer or out of range"}
If Redis goes down (before a reconnection happens):
iex> {:error, error} = Redix.command(conn, ["GET", "mykey"])
iex> error.reason
:closed
"""
@spec command(GenServer.server(), command, Keyword.t()) ::
{:ok, Redix.Protocol.redis_value()} | {:error, atom | Redix.Error.t()}
def command(conn, command, opts \\ []) do
case pipeline(conn, [command], opts) do
{:ok, [%Redix.Error{} = error]} ->
raise error
{:ok, [resp]} ->
{:ok, resp}
{:error, _reason} = error ->
error
end
end
@doc """
Issues a command on the Redis server, raising if there's an error.
This function works exactly like `command/3` but:
* if the command is successful, then the result is returned not wrapped in a
`{:ok, result}` tuple.
* if there's a Redis error, a `Redix.Error` error is raised (with the
original message).
* if there's a connection error, a `Redix.ConnectionError`
error is raised.
This function accepts the same options as `command/3`.
## Options
* `:timeout` - (integer or `:infinity`) request timeout (in
milliseconds). Defaults to `#{@default_timeout}`. If the Redis server
doesn't reply within this timeout, `{:error,
%Redix.ConnectionError{reason: :timeout}}` is returned.
## Examples
iex> Redix.command!(conn, ["SET", "mykey", "foo"])
"OK"
iex> Redix.command!(conn, ["INCR", "mykey"])
** (Redix.Error) ERR value is not an integer or out of range
If Redis goes down (before a reconnection happens):
iex> Redix.command!(conn, ["GET", "mykey"])
** (Redix.ConnectionError) :closed
"""
@spec command!(GenServer.server(), command, Keyword.t()) ::
Redix.Protocol.redis_value() | no_return
def command!(conn, command, opts \\ []) do
case command(conn, command, opts) do
{:ok, resp} ->
resp
{:error, error} ->
raise error
end
end
defp assert_valid_pipeline_commands([] = _commands) do
raise ArgumentError, "no commands passed to the pipeline"
end
defp assert_valid_pipeline_commands(commands) when is_list(commands) do
Enum.each(commands, fn
[] ->
raise ArgumentError, "got an empty command ([]), which is not a valid Redis command"
[first | _] = command when first in ~w(SUBSCRIBE PSUBSCRIBE UNSUBSCRIBE PUNSUBSCRIBE) ->
raise ArgumentError,
"Redix doesn't support Pub/Sub commands; use redix_pubsub " <>
"(https://github.com/whatyouhide/redix_pubsub) for Pub/Sub " <>
"functionality support. Offending command: #{inspect(command)}"
command when is_list(command) ->
:ok
other ->
raise ArgumentError,
"expected a list of binaries as each Redis command, got: #{inspect(other)}"
end)
end
defp assert_valid_pipeline_commands(other) do
raise ArgumentError, "expected a list of Redis commands, got: #{inspect(other)}"
end
end
|
lib/redix.ex
| 0.91331
| 0.69035
|
redix.ex
|
starcoder
|
defmodule Gobstopper.API.Auth.Email do
@moduledoc """
Handles the management of email authorization credentials.
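
  A sketch of a typical flow (the credentials are illustrative):

      {:ok, _token} = Gobstopper.API.Auth.Email.register("user@example.com", "secret")
      {:ok, token} = Gobstopper.API.Auth.Email.login("user@example.com", "secret")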
"""
@service Gobstopper.Service.Auth
@credential_type :email
alias Gobstopper.API.Auth
@doc """
Create a new identity initially associated with the given email credential.
Returns the session token on successful creation. Otherwise returns an
error.
"""
@spec register(String.t, String.t) :: { :ok, Auth.token } | { :error, String.t }
def register(email, pass), do: GenServer.call(@service, { :create, { @credential_type, { email, pass } } })
@doc """
Get the current email credential associated with the identity.
Returns the state of the credential if one exists or does not exist. Otherwise
returns an error.
"""
@spec get(Auth.token) :: { :ok, { :unverified | :verified, String.t } | { :none, nil } } | { :error, String.t }
def get(token) do
case GenServer.call(@service, { :all_credentials, token }) do
{ :ok, credentials } -> { :ok, credentials[@credential_type] }
error -> error
end
end
@doc """
Associate an email credential with the identity, replacing the old email
credential.
Returns `:ok` on successful creation. Otherwise returns an error.
"""
@spec set(Auth.token, String.t, String.t) :: :ok | { :error, String.t }
def set(token, email, pass) do
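    # Try to update an existing email credential first; if the identity has
    # none yet, fall back to creating one. Only the create error is surfaced.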
credential = { @credential_type, { email, pass } }
with { :error, _update_error } <- GenServer.call(@service, { :update, credential, token }),
{ :error, create_error } <- GenServer.call(@service, { :create, credential, token }) do
{ :error, create_error }
else
:ok -> :ok
end
end
@doc """
Remove the email credential associated with the identity.
Returns `:ok` on successful removal. Otherwise returns an error.
"""
@spec remove(Auth.token) :: :ok | { :error, String.t }
def remove(token), do: GenServer.call(@service, { :remove, { @credential_type }, token })
@doc """
Check if an email credential is associated with the identity.
Returns whether the credential exists or not, if successful. Otherwise returns
an error.
"""
@spec exists?(Auth.token) :: { :ok, boolean } | { :error, String.t }
def exists?(token), do: GenServer.call(@service, { :credential?, { @credential_type }, token })
@doc """
Login into an identity using the email credential.
Returns the session token on successful login. Otherwise returns an error.
"""
@spec login(String.t, String.t) :: { :ok, Auth.token } | { :error, String.t }
def login(email, pass), do: GenServer.call(@service, { :login, { @credential_type, { email, pass } } })
end
|
apps/gobstopper_api/lib/gobstopper.api/auth/email.ex
| 0.881239
| 0.428233
|
email.ex
|
starcoder
|
defmodule RenrakuWeb.Plugs.VerifyToken do
@behaviour Plug
@moduledoc """
Plug middleware to verify JWT tokens passed with the request and deny access if
no JWT was given.
The JWT needs to be issued by another application, signed with the valid RS256 key
and attached to the request in the `Authorization` header. It is customary to prepend
`Authorization` tokens with the string `"Bearer "` (so-called bearer tokens). This is
supported but not required, you can pass just the token.
If a valid token is passed with the request, its payload will be `assign`ed on the
conn under the key `:auth_payload` and the request goes through. If no valid token
is passed with the request, the request will be terminated with a 401 response.
In development, it is possible to disable this check and allow unauthorized request
to pass through by setting the environment variable `DISABLE_AUTH` to `"true"`.
"""
import Plug.Conn
alias Renraku.Token
@impl true
def init(options), do: options
@impl true
def call(conn, _options \\ []) do
case fetch_and_verify_token(conn) do
nil ->
if should_deny_access?() do
deny_access(conn)
else
assign_fake_user_data(conn)
end
%{"sub" => %{"id" => user_id} = payload} ->
conn
|> assign(:auth_payload, payload)
|> assign(:user_id, String.to_integer(user_id))
end
end
@is_prod Mix.env() == :prod
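  # Evaluated at compile time on purpose: Mix (and Mix.env/0) is not
  # available at runtime inside a release.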
defp should_deny_access? do
@is_prod or System.get_env("DISABLE_AUTH") != "true"
end
defp deny_access(conn) do
conn
|> put_resp_content_type("text/plain")
|> send_resp(401, "Unauthorized")
|> halt()
end
defp assign_fake_user_data(conn) do
payload = %{"id" => "2137", "email" => "<EMAIL>"}
conn
|> assign(:user_id, 2137)
|> assign(:auth_payload, payload)
end
@spec fetch_and_verify_token(conn :: Plug.Conn.t()) :: nil | map
defp fetch_and_verify_token(%Plug.Conn{} = conn) do
case fetch_token(conn) do
nil ->
nil
token ->
case Token.verify_and_validate(token) do
{:ok, payload} ->
payload
_ ->
nil
end
end
end
defp fetch_token(conn) do
case get_req_header(conn, "authorization") do
["Bearer " <> token] ->
token
[token] ->
token
_ ->
nil
end
end
end
|
lib/renraku_web/plugs/verify_token.ex
| 0.762336
| 0.474875
|
verify_token.ex
|
starcoder
|
defmodule LdGraph2.Agent do
@moduledoc """
An agent that manages a graph, allowing key-value databases to be used
instead of a purposefully-built graph database.
Note that a few special graph operations like searching are not implemented
yet.
"""
use Agent
require Logger
# Increment when adding backward-incompatible version changes to the
# database delta format.
@curr_db_ver 1
@spec start_link(list) :: {:error, any} | {:ok, pid}
@doc """
Starts the agent, loading the specified graph off a Redis store.
"""
def start_link(kvstore_name: name, opts: opts) do
Agent.start_link(
fn ->
lname = "ld2." <> name
{:ok, llength} =
Redix.command(LdGraph2.Redix, [
"LLEN",
lname
])
{:ok, version_etf} = Redix.command(LdGraph2.Redix, ["GET", lname <> ".delta_versions"])
{lname, if version_etf do
        version_ok? = check_version(:erlang.binary_to_term(version_etf, [:safe]))
cond do
llength < 1 ->
# Of course Redix can't handle null queries.
# Why would you do a query like that?
%LdGraph2.Graph{}
          version_ok? ->
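            # Rotate the list in place with LMOVE (LEFT -> RIGHT) once per
            # element so every stored delta is read without being consumed.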
{:ok, raw_graph} =
Redix.transaction_pipeline(
LdGraph2.Redix,
Stream.repeatedly(fn ->
[
"LMOVE",
lname,
lname,
"LEFT",
"RIGHT"
]
end)
|> Enum.take(llength)
)
raw_graph
|> Stream.map(&:erlang.binary_to_term(&1, [:safe]))
|> Enum.reduce(%LdGraph2.Graph{}, &apply_delta/2)
true ->
raise Version.InvalidVersionError,
"Graph cache delta version isn't the " <>
"supported version '#{@curr_db_ver}'"
end
else
Redix.command(LdGraph2.Redix, ["SET", lname <> ".delta_versions", :erlang.term_to_binary(@curr_db_ver)])
%LdGraph2.Graph{}
end}
end,
opts
)
end
@spec get(atom | pid | {atom, any} | {:via, atom, any}) :: any
@doc """
Gets the current graph data.
"""
def get(agent) do
Agent.get(agent, fn {_name, graph} ->
graph
end)
end
@spec update(atom | pid | {atom, any} | {:via, atom, any}, any) :: :ok
@doc """
Applies a transaction/delta to a `LdGraph2.Graph`, saving to Redis.
## Examples
Listed below are a few examples of possible transactions. Each tuple is
processed left-to-right within the list.
```elixir
[{:add, {:node, 2}}]
[{:add, {:node, 0}}, {:add, {:node, 1}}, {:add, {:edge, 0, 1}}]
[{:del, {:edge, 0, 1}}]
```
"""
def update(agent, transactions) do
Agent.update(agent, fn {name, graph} ->
Redix.command(
LdGraph2.Redix,
List.flatten([
"RPUSH",
name,
transactions
|> Enum.map(&:erlang.term_to_binary/1)
])
)
{name,
transactions
|> Enum.reduce(graph, &apply_delta/2)}
end)
end
defp check_version(major) when major === @curr_db_ver do
true
end
defp check_version(major) do
    Logger.error(
      "LdGraph2 delta storage version #{major} isn't supported; this build supports version #{@curr_db_ver}."
    )
false
end
defp apply_delta(delta, graph) do
case delta do
{:add, what} ->
case what do
{:node, at} -> LdGraph2.Graph.put_node(graph, at)
{:edge, from, to} -> LdGraph2.Graph.put_edge(graph, from, to)
end
{:del, what} ->
case what do
{:node, at} -> LdGraph2.Graph.del_node(graph, at)
{:edge, from, to} -> LdGraph2.Graph.del_edge(graph, from, to)
end
end
end
end
|
apps/ld_graph2/lib/ld_graph2/agent.ex
| 0.887064
| 0.637623
|
agent.ex
|
starcoder
|
defmodule Tracex do
@moduledoc """
Tracex is a tool for static analysis of mix projects
It builds upon compiler tracing introduced in Elixir 1.10, simplifying
collection of traces and turning them into valuable insights.
Tracex collects traces emitted by Elixir compiler and performs some basic data
extraction and classification. The result, together with project's metadata built
along the way, is available to the developer for further analysis.
Tracex automatically recognizes some common types of modules present in mix
projects, like Ecto schemas or Phoenix controllers and views. Additionally it
supports attaching custom classifiers that are specific to your project in order to
collect extra information that may prove helpful in actual analysis.
Elixir compiler emits a lot of traces. For practical reasons tracex collets only
ones that are local to your project. It means that any traces of interactions of
your project's code with Elixir's core modules or external libraries are discarded.
"""
alias Tracex.Collector
alias Tracex.Insights
alias Tracex.Project
alias Tracex.Trace
alias Tracex.Tracer
@app :tracex
@manifest_vsn 0.1
@doc """
Compile a project and collect compiler traces for later analysis
Project's metadata is built along the way and written to disk together with
collected traces in manifest file. This enables the developer to load it into iex
console and play with it.
## Options
* `manifest_path` - path to manifest file,
defaults to `_build/{Mix.env}/lib/tracex/.mix/tracex`
* `custom_classifiers` - list of project-specific classifier modules
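
  ## Example

  A typical session might look like this (the inspected module is
  illustrative):

      {project, traces} = Tracex.compile_project()
      Tracex.insights(traces, MyApp.Accounts)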
"""
@spec compile_project(list) :: {Project.t(), list(Trace.t())}
def compile_project(opts \\ []) do
project = Project.build_from_mix_project()
path = Keyword.get(opts, :manifest_path) || manifest_path()
classifiers = [Tracex.Classifier | Keyword.get(opts, :custom_classifiers, [])]
start_collector(project, [], classifiers)
Mix.Task.clear()
Mix.Task.run("compile", ["--force", "--tracer", Tracer])
{project, traces} = Collector.finalize()
Collector.stop()
write_manifest({project, traces}, path)
{project, traces}
end
@doc """
Returns module insights
See `Tracex.Insights` for more information.
"""
@spec insights(list(Trace.t()), atom | list(atom)) :: map
def insights(traces, module) do
Insights.module(traces, module)
end
@doc """
Loads tracex manifest file from disk
"""
@spec load_from_manifest(binary) :: {Project.t(), list(Trace.t())}
def load_from_manifest(path \\ manifest_path()) do
read_manifest(path)
end
defp start_collector(project, traces, classifiers) do
Collector.stop()
{:ok, _} = Collector.start_link(project, traces, classifiers)
end
defp manifest_path do
path = Mix.Project.manifest_path(app: @app, build_per_environment: true)
Path.join(path, "tracex")
end
defp write_manifest({project, traces}, path) do
data =
{@manifest_vsn, project, traces}
|> :erlang.term_to_binary()
File.write!(path, data)
end
defp read_manifest(path) do
manifest = path |> File.read!() |> :erlang.binary_to_term()
case manifest do
{@manifest_vsn, project, traces} ->
{project, traces}
      {vsn, _project, _traces} ->
        raise "Loaded manifest is in version #{vsn}, " <>
                "current version is #{@manifest_vsn}. Please recompile."
_ ->
raise "Cannot parse manifest file, please recompile."
end
end
end
|
lib/tracex.ex
| 0.788909
| 0.526647
|
tracex.ex
|
starcoder
|
defmodule Plaid.Accounts do
@moduledoc """
Functions for Plaid `accounts` endpoint.
"""
import Plaid, only: [make_request_with_cred: 4, get_cred: 0]
alias Plaid.Utils
defstruct accounts: [], item: nil, request_id: nil
@type t :: %__MODULE__{accounts: [Plaid.Accounts.Account.t],
item: Plaid.Item.t,
request_id: String.t}
@type params :: %{required(atom) => String.t | map}
@type cred :: %{required(atom) => String.t}
@endpoint "accounts"
defmodule Account do
@moduledoc """
Plaid Account data structure.
"""
defstruct account_id: nil, balances: nil, name: nil, mask: nil,
official_name: nil, type: nil, subtype: nil
@type t :: %__MODULE__{account_id: String.t,
balances: Plaid.Accounts.Account.Balance.t,
name: String.t,
mask: String.t,
official_name: String.t,
type: String.t,
subtype: String.t
}
defmodule Balance do
@moduledoc """
Plaid Account Balance data structure.
"""
defstruct available: nil, current: nil, limit: nil
@type t :: %__MODULE__{available: float, current: float, limit: float}
end
end
@doc """
Gets account data associated with Item.
Parameters
```
%{access_token: "access-token"}
```
"""
@spec get(params, cred | nil) :: {:ok, Plaid.Accounts.t} | {:error, Plaid.Error.t}
def get(params, cred \\ get_cred()) do
endpoint = "#{@endpoint}/get"
make_request_with_cred(:post, endpoint, cred, params)
|> Utils.handle_resp(:accounts)
end
@doc """
  Gets balance for specified accounts associated with Item.
Parameters
```
%{access_token: "access-token", options: %{account_ids: ["account-id"]}}
```
"""
@spec get_balance(params, cred | nil) :: {:ok, Plaid.Accounts.t} | {:error, Plaid.Error.t}
def get_balance(params, cred \\ get_cred()) do
endpoint = "#{@endpoint}/balance/get"
make_request_with_cred(:post, endpoint, cred, params)
|> Utils.handle_resp(:accounts)
end
end
|
lib/plaid/accounts.ex
| 0.785267
| 0.597989
|
accounts.ex
|
starcoder
|
defmodule Ockam.Wire.Binary.V1 do
@moduledoc false
@behaviour Ockam.Wire
alias Ockam.Address
alias Ockam.Message
@version 1
# TODO: refactor this.
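  # BARE schemas for the v1 wire format: an address is a {type, value}
  # struct, a route is an array of addresses, and a message carries a
  # version, both routes, and an opaque payload.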
def bare_spec(:address) do
{:struct, [type: :uint, value: :data]}
end
def bare_spec(:route) do
{:array, bare_spec(:address)}
end
def bare_spec(:message) do
{:struct,
[
version: :uint,
onward_route: bare_spec(:route),
return_route: bare_spec(:route),
payload: :data
]}
end
@doc """
Encodes a message into a binary.
Returns `{:ok, iodata}`, if it succeeds.
Returns `{:error, error}`, if it fails.
"""
@spec encode(message :: Message.t()) ::
{:ok, encoded :: iodata}
def encode(%Ockam.Message{} = message) do
onward_route = Message.onward_route(message)
return_route = Message.return_route(message)
payload = Message.payload(message)
## TODO: validate data and handle errors?
encoded =
:bare.encode(
%{
version: @version,
onward_route: normalize_route(onward_route),
return_route: normalize_route(return_route),
payload: payload
},
bare_spec(:message)
)
{:ok, encoded}
end
@doc """
Decodes a message from a binary.
Returns `{:ok, message}`, if it succeeds.
Returns `{:error, error}`, if it fails.
"""
@spec decode(encoded :: binary()) ::
{:ok, message :: Message.t()} | {:error, error :: any()}
def decode(encoded) do
## Expect first byte to be the version
case encoded do
<<@version, _rest::binary>> ->
case :bare.decode(encoded, bare_spec(:message)) do
{:ok, %{onward_route: onward_route, return_route: return_route} = decoded, ""} ->
{:ok,
struct(
Ockam.Message,
Map.merge(decoded, %{
onward_route: denormalize_route(onward_route),
return_route: denormalize_route(return_route)
})
)}
{:ok, _decoded, rest} ->
{:error, {:too_much_data, encoded, rest}}
{:error, reason} ->
{:error, reason}
end
<<wrong_version, _rest::binary>> ->
{:error, {:invalid_version, encoded, wrong_version}}
end
end
def encode_route(route) do
{:ok, :bare.encode(normalize_route(route), bare_spec(:route))}
end
def decode_route(encoded_route) do
case :bare.decode(encoded_route, bare_spec(:route)) do
{:ok, route, ""} ->
{:ok, denormalize_route(route)}
{:ok, _decoded, rest} ->
{:error, {:too_much_data, encoded_route, rest}}
{:error, reason} ->
{:error, reason}
end
end
def encode_address(address) do
{:ok, :bare.encode(Address.normalize(address), bare_spec(:address))}
end
def decode_address(encoded_address) do
case :bare.decode(encoded_address, bare_spec(:address)) do
{:ok, address, ""} ->
{:ok, Address.denormalize(address)}
{:ok, _decoded, rest} ->
{:error, {:too_much_data, encoded_address, rest}}
{:error, reason} ->
{:error, reason}
end
end
def normalize_route(route) when is_list(route) do
## TODO: check if all addresses are valid
Enum.map(route, &Address.normalize/1)
end
def denormalize_route(addresses) when is_list(addresses) do
Enum.map(addresses, &Address.denormalize/1)
end
end
|
implementations/elixir/ockam/ockam/lib/ockam/wire/binary/v1.ex
| 0.644001
| 0.419172
|
v1.ex
|
starcoder
|
defmodule Grizzly.ZWave.CommandClasses.DoorLock do
@moduledoc """
DoorLock Command Class
This command class provides commands that are used to operate and configure
door lock devices
"""
@behaviour Grizzly.ZWave.CommandClass
alias Grizzly.ZWave.DecodeError
@type mode ::
:unsecured
| :unsecured_with_timeout
| :unsecured_inside_door_handles
| :unsecured_inside_door_handles_with_timeout
| :unsecured_outside_door_handles
| :unsecured_outside_door_handles_with_timeout
| :secured
| :unknown
@type operation_type :: :constant_operation | :timed_operation
@type door_components :: :bolt | :latch | :door
@impl true
def byte(), do: 0x62
@impl true
def name(), do: :door_lock
@spec mode_to_byte(mode()) :: byte()
def mode_to_byte(:unsecured), do: 0x00
def mode_to_byte(:unsecured_with_timeout), do: 0x01
def mode_to_byte(:unsecured_inside_door_handles), do: 0x10
def mode_to_byte(:unsecured_inside_door_handles_with_timeout), do: 0x11
def mode_to_byte(:unsecured_outside_door_handles), do: 0x20
def mode_to_byte(:unsecured_outside_door_handles_with_timeout), do: 0x21
def mode_to_byte(:secured), do: 0xFF
# version >= 4
def mode_to_byte(:unknown), do: 0xFE
@spec mode_from_byte(byte()) :: {:ok, mode()} | {:error, DecodeError.t()}
def mode_from_byte(0x00), do: {:ok, :unsecured}
def mode_from_byte(0x01), do: {:ok, :unsecured_with_timeout}
def mode_from_byte(0x10), do: {:ok, :unsecured_inside_door_handles}
def mode_from_byte(0x11), do: {:ok, :unsecured_inside_door_handles_with_timeout}
def mode_from_byte(0x20), do: {:ok, :unsecured_outside_door_handles}
def mode_from_byte(0x21), do: {:ok, :unsecured_outside_door_handles_with_timeout}
def mode_from_byte(0xFF), do: {:ok, :secured}
# version >= 4
def mode_from_byte(0xFE), do: {:ok, :unknown}
def mode_from_byte(byte),
do: {:error, %DecodeError{value: byte, param: :mode}}
def operation_type_to_byte(:constant_operation), do: 0x01
def operation_type_to_byte(:timed_operation), do: 0x02
def operation_type_from_byte(0x01), do: {:ok, :constant_operation}
def operation_type_from_byte(0x02), do: {:ok, :timed_operation}
def operation_type_from_byte(byte),
do: {:error, %DecodeError{param: :operation_type, value: byte}}
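  # Encodes a list of handle numbers (1..4) as a 4-bit mask where bit n-1
  # set means handle n is present, e.g. [1, 3] => 0b0101.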
def door_handles_to_bitmask(handles) do
<<bitmask::size(4)>> =
for handle <- 4..1, into: <<>> do
if handle in handles, do: <<0x01::1>>, else: <<0x00::1>>
end
bitmask
end
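  # Inverse of door_handles_to_bitmask/1: returns the handle numbers whose
  # bits are set in the low 4 bits, e.g. 0b0101 => [3, 1].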
def door_handles_from_bitmask(byte) do
bitmask = <<byte::size(4)>>
for(<<x::1 <- bitmask>>, do: x)
|> Enum.reverse()
|> Enum.with_index(1)
|> Enum.reduce([], fn {bit, index}, acc ->
if bit == 1, do: [index | acc], else: acc
end)
end
def to_minutes_and_seconds(seconds) do
{div(seconds, 60), rem(seconds, 60)}
end
end
|
lib/grizzly/zwave/command_classes/door_lock.ex
| 0.797911
| 0.400192
|
door_lock.ex
|
starcoder
|
defmodule ExUnitFixtures.Teardown do
@moduledoc false
def start_link do
Agent.start_link(fn -> %{pids: %{}, teardowns: %{}} end, name: __MODULE__)
end
@doc """
Runs teardown for the module registered as `module_ref`.
"""
@spec run(reference) :: :ok
def run(module_ref) when is_reference(module_ref) do
__MODULE__
|> Agent.get_and_update(fn (%{teardowns: tds, pids: pids}) ->
{tds[module_ref], %{teardowns: Map.delete(tds, module_ref),
pids: Map.delete(pids, module_ref)}}
end)
    |> Enum.each(&apply(&1, []))
end
@doc """
Like `register_pid/2` but uses the current process `pid`
"""
@spec register_pid(reference) :: :ok
def register_pid(module_ref) when is_reference(module_ref) do
    register_pid(module_ref, self())
end
@doc """
Associates `pid` with `module_ref`
"""
@spec register_pid(reference, pid) :: :ok
def register_pid(module_ref, pid)
when is_reference(module_ref)
and is_pid(pid) do
Agent.update(__MODULE__, fn (state = %{pids: pids, teardowns: tds}) ->
%{state | pids: Map.put(pids, pid, module_ref),
teardowns: Map.put(tds, module_ref, [])}
end)
end
@doc """
Registers a teardown function for the current test pid.
For the simple case of test-scoped-fixtures this defers to
`ExUnit.Callbacks.on_exit/1`. For module scoped fixtures, this will register
the function to run when all the modules tests are done.
"""
@spec register_teardown(:test | :module, fun) :: :ok
def register_teardown(scope \\ :test, fun)
def register_teardown(:test, fun) when is_function(fun, 0) do
ExUnit.Callbacks.on_exit(fun)
end
def register_teardown(:module, fun) when is_function(fun, 0) do
    pid = self()
Agent.update(__MODULE__, fn (state = %{teardowns: tds, pids: pids}) ->
unless Map.has_key?(pids, pid) do
raise "register_teardown/2 can only be invoked from the test process"
end
new_tds = Map.update!(tds, pids[pid], fn list -> [fun|list] end)
%{state | teardowns: new_tds}
end)
end
end
|
lib/ex_unit_fixtures/teardown.ex
| 0.696991
| 0.421046
|
teardown.ex
|
starcoder
|
defmodule Snowpack.Telemetry do
@moduledoc """
Telemetry integration.
  Unless specified, all times are in `:native` units.
Snowpack executes the following events:
* `[:snowpack, :query, :start]` - Executed at the start of each query sent to Snowflake.
#### Measurements
* `:system_time` - The time the query started
#### Metadata:
* `:query` - The query sent to the database as a string
* `:params` - The query parameters
* `[:snowpack, :query, :stop]` - Executed at the end of each query sent to Snowflake.
#### Measurements
* `:duration` - The time spent executing the query
#### Metadata:
* `:query` - The query sent to the database as a string
* `:params` - The query parameters
* `:result` - The query result (selected, updated)
* `:num_rows` - The number of rows effected by the query
* `:error` - Present if any error occurred while processing the query. (optional)
* `[:snowpack, :query, :exception]` - Executed if executing a query throws an exception.
#### Measurements
* `:duration` - The time spent executing the query
#### Metadata
* `:kind` - The type of exception.
* `:error` - Error description or error data.
* `:stacktrace` - The stacktrace
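
  A minimal handler sketch (the handler id and what it does with the event
  are arbitrary):

      :telemetry.attach(
        "snowpack-query-logger",
        [:snowpack, :query, :stop],
        fn _event, measurements, metadata, _config ->
          IO.inspect({measurements.duration, metadata.query})
        end,
        nil
      )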
"""
@doc false
@spec start(atom, map, map) :: integer
  # Emits a `start` telemetry event and returns the start time
def start(event, meta \\ %{}, extra_measurements \\ %{}) do
start_time = System.monotonic_time()
:telemetry.execute(
[:snowpack, event, :start],
Map.merge(extra_measurements, %{system_time: System.system_time()}),
meta
)
start_time
end
@doc false
@spec stop(atom, number, map, map) :: :ok
# Emits a stop event.
def stop(event, start_time, meta \\ %{}, extra_measurements \\ %{}) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
:telemetry.execute(
[:snowpack, event, :stop],
measurements,
meta
)
end
@doc false
@spec exception(atom, number, any, any, any, map, map) :: :ok
def exception(
event,
start_time,
kind,
reason,
stack,
meta \\ %{},
extra_measurements \\ %{}
) do
end_time = System.monotonic_time()
measurements = Map.merge(extra_measurements, %{duration: end_time - start_time})
meta =
meta
|> Map.put(:kind, kind)
|> Map.put(:error, reason)
|> Map.put(:stacktrace, stack)
:telemetry.execute([:snowpack, event, :exception], measurements, meta)
end
@doc false
@spec event(atom, number | map, map) :: :ok
# Used for reporting generic events
def event(event, measurements, meta) do
:telemetry.execute([:snowpack, event], measurements, meta)
end
end
|
lib/snowpack/telemetry.ex
| 0.88806
| 0.649481
|
telemetry.ex
|
starcoder
|
defmodule Blunt.Message.Options.Parser do
alias Blunt.Message.{Changeset, Metadata}
def parse_message_opts(message_module, opts) do
message_opts = Metadata.get(message_module, :options, [])
%{parsed: parsed, unparsed: unparsed} =
Enum.reduce(
message_opts,
%{parsed: [], unparsed: opts},
fn current_option, acc ->
{name, value} = parse_option(current_option, acc.unparsed)
%{acc | parsed: [{name, value} | acc.parsed], unparsed: Keyword.delete(acc.unparsed, name)}
end
)
case validate_options(parsed, message_opts) do
{:ok, parsed} ->
{:ok, Keyword.merge(unparsed, parsed)}
{:error, errors} ->
{:error, errors}
end
end
defp parse_option({name, _type, config}, provided_opts) do
default = Keyword.fetch!(config, :default)
{name, Keyword.get(provided_opts, name, default)}
end
defp validate_options(parsed_opts, supported_opts) do
required =
supported_opts
|> Enum.filter(fn {_, _type, config} -> Keyword.fetch!(config, :required) == true end)
|> Enum.map(&elem(&1, 0))
|> Enum.uniq()
data = %{}
types =
Enum.into(supported_opts, %{}, fn {name, type, config} ->
{name, ecto_type(type, config)}
end)
params = Enum.into(parsed_opts, %{})
changeset =
{data, types}
|> Ecto.Changeset.cast(params, Map.keys(types))
|> Ecto.Changeset.validate_required(required)
case changeset do
%{valid?: false} ->
{:error, Changeset.format_errors(changeset)}
      %{valid?: true} = changeset ->
        validated =
          changeset
|> Ecto.Changeset.apply_changes()
|> Map.to_list()
{:ok, validated}
end
end
[
integer: :integer,
float: :float,
boolean: :boolean,
string: :string,
map: :map,
binary: :binary,
decimal: :decimal,
id: :id,
binary_id: Ecto.UUID,
utc_datetime: :utc_datetime,
naive_datetime: :naive_datetime,
date: :date,
time: :time,
any: :any,
utc_datetime_usec: :utc_datetime_usec,
naive_datetime_usec: :naive_datetime_usec,
time_usec: :time_usec
]
|> Enum.map(fn {type_hint, ecto_type} ->
def ecto_type(unquote(type_hint), _config), do: unquote(ecto_type)
def ecto_type({:array, unquote(type_hint)}, _config), do: {:array, unquote(ecto_type)}
def ecto_type({:map, unquote(type_hint)}, _config), do: {:map, unquote(ecto_type)}
end)
def ecto_type(:pid, _config), do: Blunt.Message.Type.Pid
def ecto_type(:atom, _config), do: Blunt.Message.Type.Atom
def ecto_type(:ecto_multi, _config), do: Blunt.Message.Type.Multi
def ecto_type({:array, :atom}, _config) do
{:array, Blunt.Message.Type.Atom}
end
def ecto_type(:enum, config) do
{:parameterized, Ecto.Enum, Ecto.Enum.init(values: Keyword.get(config, :values))}
end
def ecto_type({:array, :enum}, config) do
{:parameterized, {:array, Ecto.Enum}, Ecto.Enum.init(values: Keyword.get(config, :values))}
end
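
  # Examples of the mapping above (results follow directly from the clauses):
  #
  #     ecto_type(:string, [])                    #=> :string
  #     ecto_type({:array, :integer}, [])         #=> {:array, :integer}
  #     ecto_type(:atom, [])                      #=> Blunt.Message.Type.Atom
  #     ecto_type(:enum, values: [:draft, :done]) #=> {:parameterized, Ecto.Enum, ...}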
end
|
apps/blunt/lib/blunt/message/options/parser.ex
| 0.510008
| 0.404155
|
parser.ex
|
starcoder
|
defmodule Grizzly.SmartStart.MetaExtension.LocationInformation do
@moduledoc """
  This extension is used to advertise the location assigned to the supporting node.
  The location string cannot contain underscores and cannot end with a dash.
  The location string can contain a period (.), but a sublocation cannot end with a
  dash. For example:
  ```
  123.123-.123
  ```
  The above location is invalid. To make it valid, remove the `-` before the `.`.
  A node's location cannot be more than 62 bytes.
"""
@behaviour Grizzly.SmartStart.MetaExtension
@type t :: %__MODULE__{
location: String.t()
}
defstruct location: nil
@doc """
Make a new `LocationInformation.t()` from a location string
If the location contains characters that are not valid this function will return
`{:error, reason}` where `reason` is:
- `:contains_underscore`
- `:ends_with_dash`
- `:location_too_long`
- `:sublocation_ends_with_dash`
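
  ## Examples

      iex> Grizzly.SmartStart.MetaExtension.LocationInformation.new("kitchen")
      {:ok, %Grizzly.SmartStart.MetaExtension.LocationInformation{location: "kitchen"}}

      iex> Grizzly.SmartStart.MetaExtension.LocationInformation.new("kitchen_")
      {:error, :contains_underscore}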
"""
@spec new(String.t()) ::
{:ok, t()}
| {:error,
:contains_underscore
| :ends_with_dash
| :location_too_long
| :sublocation_ends_with_dash}
def new(location) do
case validate_location(location) do
:ok ->
{:ok, %__MODULE__{location: location}}
error ->
error
end
end
@doc """
Make a `LocationInformation.t()` into a binary
"""
@impl Grizzly.SmartStart.MetaExtension
@spec to_binary(t()) :: {:ok, binary()}
def to_binary(%__MODULE__{location: location}) do
location_bin = location_to_binary(location)
{:ok, <<0x66, byte_size(location_bin)>> <> location_bin}
end
@doc """
Make a `LocationInformation.t()` from a binary
If the location contains characters that are not valid this function will return
`{:error, reason}` where `reason` is:
- `:contains_underscore`
- `:ends_with_dash`
- `:location_too_long`
- `:sublocation_ends_with_dash`
If the critical bit set in the binary this function will return
`{:error, :critical_bit_set}`
"""
@impl Grizzly.SmartStart.MetaExtension
@spec from_binary(binary) ::
{:ok, t()}
| {:error,
:contains_underscore
| :ends_with_dash
| :critical_bit_set
| :location_too_long
| :sublocation_ends_with_dash
| :invalid_binary}
def from_binary(<<0x33::size(7), 0x00::size(1), _length, location::binary>>) do
location_string =
location
|> to_string()
case validate_location(location_string) do
:ok -> {:ok, %__MODULE__{location: location_string}}
error -> error
end
end
def from_binary(<<0x33::size(7), 0x01::size(1), _rest::binary>>) do
{:error, :critical_bit_set}
end
def from_binary(_), do: {:error, :invalid_binary}
defp validate_location(location) when byte_size(location) < 63 do
with :ok <- contains_underscore?(location),
:ok <- ensure_no_dash_at_end(location),
:ok <- ensure_no_dash_at_end_of_sublocation(location) do
:ok
end
end
defp validate_location(_location) do
{:error, :location_too_long}
end
defp ensure_no_dash_at_end_of_sublocation(location) do
if String.contains?(location, "-.") do
{:error, :sublocation_ends_with_dash}
else
:ok
end
end
defp contains_underscore?(location) do
if String.contains?(location, "_") do
{:error, :contains_underscore}
else
:ok
end
end
defp ensure_no_dash_at_end(location) do
if String.ends_with?(location, "-") do
{:error, :ends_with_dash}
else
:ok
end
end
defp location_to_binary(location) do
location_list =
location
|> String.codepoints()
:erlang.list_to_binary(location_list)
end
end
|
lib/grizzly/smart_start/meta_extension/location_information.ex
| 0.913058
| 0.83868
|
location_information.ex
|
starcoder
|
defmodule Functor do
@moduledoc """
An implementation of functor-style error handling of `:ok`/`:error` tuples.
"""
@type result() :: {:ok, any()} | {:error, any()} | :error
@doc """
map implementation for functor for :ok, :error
## Examples
iex> Functor.f_map({:ok, [1, 2, 3]}, &hd/1)
{:ok, 1}
iex> Functor.f_map({:error, "This is an error"}, &hd/1)
{:error, "This is an error"}
iex> Functor.f_map(:error, &hd/1)
{:error, "Failed applying function via `Functor.f_map`"}
iex> Functor.f_map({:ok, [1, 2, 3]}, &hd/1)
...> |> Functor.f_map(&(10 + &1))
{:ok, 11}
"""
@spec f_map(result(),(any()->any())) :: result()
def f_map(x, f) do
case x do
{:ok, value} -> {:ok, f.(value)}
{:error, value} -> {:error, value}
_ -> {:error, "Failed applying function via `Functor.f_map`"}
end
end
@doc """
Reduce function applying wrapped values, with a quick escape for any error
## Examples
iex> Functor.reduce_f_map([{:ok, 1}, {:ok, 2}, {:ok, 3}], {:ok, 0}, &+/2)
{:ok, 6}
iex> Functor.reduce_f_map([{:ok, 1}, {:ok, 2}, {:error, "Failed"}], {:ok, 0}, &+/2)
{:error, "Failed"}
iex> Functor.reduce_f_map([{:ok, 1}, {:ok, 2}, :error], {:ok, 0}, &+/2)
{:error, "Failed applying function via `Functor.reduce_map`"}
"""
@spec reduce_f_map([result()], any(), (any(), any() -> any())) :: result()
def reduce_f_map([], acc, _f) do
acc
end
def reduce_f_map([x|xs], acc, f) do
case x do
{:ok, value} -> reduce_f_map(xs, f_map(acc, &(f.(value, &1))), f)
{:error, value} -> {:error, value}
_ -> {:error, "Failed applying function via `Functor.reduce_map`"}
end
end
@doc """
Tests for whether a result is `:ok`
## Examples
iex> Functor.is_ok?({:ok, 12})
true
iex> Functor.is_ok?(12)
false
iex> Functor.is_ok?(:error)
false
iex> Functor.is_ok?({:error, "This is an error."})
false
"""
@spec is_ok?(result()) :: boolean()
def is_ok?({:ok, _}), do: true
def is_ok?(_), do: false
@doc """
Tests for whether a result is not `:ok`
## Examples
iex> Functor.is_error?({:ok, 12})
false
iex> Functor.is_error?(12)
true
iex> Functor.is_error?(:error)
true
iex> Functor.is_error?({:error, "This is an error."})
true
"""
@spec is_error?(result()) :: boolean()
def is_error?({:ok, _}), do: false
def is_error?(_), do: true
end
|
lib/functor.ex
| 0.90567
| 0.417984
|
functor.ex
|
starcoder
|
defmodule Kitt.Util do
@moduledoc """
Utility functions for interacting with data frames and elements
for cleaner readability across modules.
"""
alias Kitt.Message.{BSM, CSR, EVA, ICA, MAP, PSM, RSA, SPAT, SRM, SSM, TIM}
@doc """
Converts an integer to its 4-byte binary representation
for compatibility with common J2735 ID field spec.
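
  ## Examples

      iex> Kitt.Util.encode_id(1)
      <<0, 0, 0, 1>>

      iex> Kitt.Util.encode_id(0x1234)
      <<0, 0, 18, 52>>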
"""
@spec encode_id(non_neg_integer()) :: binary()
def encode_id(id) do
encoded_id = :binary.encode_unsigned(id)
padding = (4 - byte_size(encoded_id)) * 8
<<0::size(padding)>> <> encoded_id
end
@doc """
Converts a fixed-size byte identifier field to a human-readable integer
"""
@spec decode_id(binary()) :: non_neg_integer()
def decode_id(id), do: :binary.decode_unsigned(id)
@doc """
"""
@spec type(atom() | non_neg_integer()) :: {module(), atom()}
def type(type) when type in [:bsm, 20], do: {BSM, BSM.type()}
def type(type) when type in [:csr, 21], do: {CSR, CSR.type()}
def type(type) when type in [:eva, 22], do: {EVA, EVA.type()}
def type(type) when type in [:ica, 23], do: {ICA, ICA.type()}
def type(type) when type in [:map, 18], do: {MAP, MAP.type()}
def type(type) when type in [:psm, 32], do: {PSM, PSM.type()}
def type(type) when type in [:rsa, 27], do: {RSA, RSA.type()}
def type(type) when type in [:spat, 19], do: {SPAT, SPAT.type()}
def type(type) when type in [:srm, 29], do: {SRM, SRM.type()}
def type(type) when type in [:ssm, 30], do: {SSM, SSM.type()}
def type(type) when type in [:tim, 31], do: {TIM, TIM.type()}
@doc """
Parses a keyword list of options and returns the desired output
format for an encode function or the expected input format for
a decode function.
Defaults to hexadecimal format via the `:hex` atom if the `:format`
option is not supplied.
Returns an `{:error, reason}` if an unsupported value is supplied
for the `:format` option.
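
  ## Examples

      iex> Kitt.Util.get_format([])
      :hex

      iex> Kitt.Util.get_format(format: :binary)
      :binary

      iex> Kitt.Util.get_format(format: :json)
      {:error, ":json is an unsupported format"}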
"""
@spec get_format(keyword()) :: :hex | :binary | {:error, term()}
def get_format(opts) do
case Keyword.get(opts, :format) do
hex when hex in [nil, :hex] -> :hex
:binary -> :binary
other -> {:error, "#{inspect(other)} is an unsupported format"}
end
end
@doc """
Ensures data is converted to binary format for compatibility with
various fixed-size data element fields in the J2735 message set.
"""
@spec to_binary(binary() | integer()) :: binary()
def to_binary(data) when is_binary(data), do: data
def to_binary(data) when is_integer(data), do: <<data>>
@doc """
Recursively converts a Kitt message struct to its equivalent Elixir
map to allow it to be encoded to binary format. Reduces over the
fields of the struct and converts it and any sub-fields defined as structs
to a map.
"""
@spec to_map_recursive(term()) :: term()
def to_map_recursive(%{__struct__: _type} = element) when is_map(element) do
element |> Map.from_struct() |> reduce_element()
end
def to_map_recursive(element) when is_map(element) do
reduce_element(element)
end
def to_map_recursive(element), do: element
defp reduce_element(element) do
Enum.reduce(element, %{}, &put_non_nil_elements/2)
end
defp put_non_nil_elements({_key, value}, acc) when value == nil, do: acc
defp put_non_nil_elements({key, value}, acc) do
Map.put(acc, key, to_map_recursive(value))
end
end
|
lib/kitt/util.ex
| 0.867387
| 0.651698
|
util.ex
|
starcoder
|
defmodule RandomAccessList do
@moduledoc """
A random access list is a persistent list data structure that has O(log n) time lookups and updates,
while maintaining a constant time for cons, tail and head operations.
This compares to a standard list that has a O(i) time for lookups and updates, with i being the index.
"""
defstruct list: []
@type random_access_list() :: %__MODULE__{}
@type index() :: integer()
@doc """
Instantiates a random access list from a standard list.
"""
@spec new(list()) :: random_access_list()
def new(list, acc \\ [])
def new([], acc), do: struct(__MODULE__, list: Enum.reverse(acc))
def new(list, acc) when is_list(list) do
size =
list
|> length()
|> greedy_skew_binary_decomposition()
{head, tail} = Enum.split(list, size)
new(tail, [CompleteBinaryTree.new(head) | acc])
end
@doc """
Returns the element at the index provided. If the index is
not present in the random access list `:error` is returned.
"""
  @spec fetch(random_access_list(), index()) :: {:ok, any()} | :error
def fetch(%__MODULE__{list: list}, index), do: fetch_(list, index)
defp fetch_([], _index), do: :error
defp fetch_([head | tail], index) do
if index < CompleteBinaryTree.size(head) do
CompleteBinaryTree.fetch(head, index)
else
fetch_(tail, index - CompleteBinaryTree.size(head))
end
end
@doc """
Updates an element in a random access list at the index provided,
using the update function that is passed.
"""
@spec update_at(random_access_list(), index(), fun()) :: random_access_list()
def update_at(%__MODULE__{list: list}, index, update_func) do
%__MODULE__{list: update_at_(list, [], index, update_func)}
end
defp update_at_([], acc, _index, _update_func), do: Enum.reverse(acc)
defp update_at_([head | tail], acc, index, update_func) do
if index < CompleteBinaryTree.size(head) do
updated_tree = CompleteBinaryTree.update_at(head, index, update_func)
Enum.reverse([updated_tree | acc]) ++ tail
else
update_at_(tail, [head | acc], index - CompleteBinaryTree.size(head), update_func)
end
end
defp greedy_skew_binary_decomposition(number) do
trunc(:math.pow(2, :math.floor(:math.log(number + 1) / :math.log(2))) - 1)
end
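
  # A usage sketch (assumes `CompleteBinaryTree` preserves list order):
  #
  #     list = RandomAccessList.new([10, 20, 30])
  #     RandomAccessList.fetch(list, 1)                #=> {:ok, 20}
  #     RandomAccessList.update_at(list, 1, &(&1 * 2)) #=> doubles the element at index 1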
end
|
lib/random_access_list.ex
| 0.752831
| 0.421046
|
random_access_list.ex
|
starcoder
|
defmodule OneDHCPD.IPCalculator do
@moduledoc """
This module handles IP address calculations.
The most involved of the calculations is the determination of a good IP
subnet to use. OneDHCPD subnet tries for the following:
* Subnets should be the same across reboots (convenience and no surprise conflicts)
* Support running on more than one interface (unique subnets on device)
* Support hosts than have more than one device (unique subnets between
devices)
* Don't conflict with the IPs used on the host for Internet
The algorithm here is to hash the hostname (uniqueness between devices
assuming the hostname is unique) and the network interface (uniqueness on
device). Then use those bits to pick a subnet in the 172.31.0.0/16
private address range. That private range was picked arbitrarily since
10.0.0.0/8 and 192.168.0.0/16 ranges are commonly used. 172.18.0.0/16
is used by Docker.
"""
@doc """
Return the subnet mask.
Currently this is hardcoded to a /30 network.
"""
@spec mask() :: :inet.ip4_address()
def mask(), do: {255, 255, 255, 252}
@doc """
Return the prefix length that OneDHCPD uses.
"""
@spec prefix_length() :: 30
def prefix_length(), do: 30
@doc """
Calculate the default subnet for the specified Ethernet interface.
"""
@spec default_subnet(String.t()) :: :inet.ip4_address()
def default_subnet(ifname) do
{:ok, hostname} = :inet.gethostname()
default_subnet(ifname, to_string(hostname))
end
@doc """
Calculate the default subnet for the specified Ethernet interface
and specify the hostname manually.
"""
@spec default_subnet(String.t(), String.t()) :: :inet.ip4_address()
def default_subnet(ifname, hostname) do
    # derive 14 deterministic, hash-based bits for the subnet (stable across reboots)
<<unique_bits::14-bits, _leftovers::bits()>> = :crypto.hash(:md5, [hostname, ifname])
prefix = <<172, 31>>
# Build the IP address as a binary and extract the individual bytes
<<a, b, c, d>> = <<prefix::16-bits, unique_bits::14-bits, 0::integer-size(2)>>
{a, b, c, d}
end
@doc """
Return our IP address. This is the one that should be set
as a static address on the interface if using the defaults.
"""
@spec our_ip_address(:inet.ip4_address()) :: :inet.ip4_address()
def our_ip_address({a, b, c, d}) do
{a, b, c, d + 1}
end
@doc """
Return the IP address that's given out to the client.
"""
@spec their_ip_address(:inet.ip4_address()) :: :inet.ip4_address()
def their_ip_address({a, b, c, d}) do
{a, b, c, d + 2}
end
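
  # A usage sketch (interface name and hostname are illustrative; the actual
  # subnet depends on the MD5 hash of those two values):
  #
  #     subnet = OneDHCPD.IPCalculator.default_subnet("usb0", "nerves-1234")
  #     OneDHCPD.IPCalculator.our_ip_address(subnet)   # subnet base + 1 (server side)
  #     OneDHCPD.IPCalculator.their_ip_address(subnet) # subnet base + 2 (client side)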
end
|
lib/one_dhcpd/ip_calculator.ex
| 0.804675
| 0.626196
|
ip_calculator.ex
|
starcoder
|
import Croma.Defun
alias Croma.Result, as: R
defmodule Croma.TypeGen do
@moduledoc """
Module that defines macros for ad-hoc (in other words "in-line") module definitions.
"""
@doc """
Creates a new module that represents a nilable type, based on the given type module `module`.
Using the given type module `nilable/1` generates a new module that defines:
- `@type t :: nil | module.t`
- `@spec valid?(term) :: boolean`
- `@spec default() :: nil`
- If the given module exports `new/1`
- `@spec new(term) :: Croma.Result.t(t)`
- `@spec new!(term) :: t`
This is useful in defining a struct with nilable fields using `Croma.Struct`.
## Examples
iex> use Croma
...> defmodule I do
...> use Croma.SubtypeOfInt, min: 0
...> end
...> defmodule S do
...> use Croma.Struct, fields: [not_nilable_int: I, nilable_int: Croma.TypeGen.nilable(I)]
...> end
...> S.new(%{not_nilable_int: 0, nilable_int: nil})
%S{nilable_int: nil, not_nilable_int: 0}
"""
defmacro nilable(module) do
nilable_impl(Macro.expand(module, __CALLER__), Macro.Env.location(__CALLER__))
end
defp nilable_impl(mod, location) do
module_body = Macro.escape(nilable_module_body(mod))
quote bind_quoted: [mod: mod, module_body: module_body, location: location] do
name = Module.concat(Croma.TypeGen.Nilable, mod)
Croma.TypeGen.ensure_module_defined(name, module_body, location)
name
end
end
defp nilable_module_body(mod) do
quote bind_quoted: [mod: mod] do
@moduledoc false
@mod mod
@type t :: nil | unquote(@mod).t
defun valid?(value :: term) :: boolean do
nil -> true
v -> @mod.valid?(v)
end
# Invoking `module_info/1` on `mod` automatically compiles and loads the module if necessary.
if {:new, 1} in @mod.module_info(:exports) do
defun new(value :: term) :: R.t(t) do
nil -> {:ok, nil}
v -> @mod.new(v) |> R.map_error(fn reason -> R.ErrorReason.add_context(reason, __MODULE__) end)
end
defun new!(term :: term) :: t do
new(term) |> R.get!()
end
end
defun default() :: t, do: nil
end
end
@doc """
An ad-hoc version of `Croma.SubtypeOfList`.
Options:
- `:define_default0?` - Boolean value that indicates whether to define `default/0` (which simply returns `[]`). Defaults to `true`.
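
  A sketch of usage in a struct field (mirroring the `nilable/1` example above):

      defmodule S2 do
        use Croma.Struct, fields: [ints: Croma.TypeGen.list_of(Croma.Integer)]
      end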
"""
defmacro list_of(module, options \\ []) do
list_of_impl(Macro.expand(module, __CALLER__), Macro.Env.location(__CALLER__), options)
end
defp list_of_impl(mod, location, options) do
module_body = Macro.escape(list_of_module_body(mod, options))
quote bind_quoted: [mod: mod, module_body: module_body, location: location, options: options] do
prefix = if Keyword.get(options, :define_default0?, true), do: Croma.TypeGen.ListOf, else: Croma.TypeGen.ListOfNoDefault0
name = Module.concat(prefix, mod)
Croma.TypeGen.ensure_module_defined(name, module_body, location)
name
end
end
defp list_of_module_body(mod, options) do
quote bind_quoted: [mod: mod, options: options] do
@moduledoc false
@mod mod
@type t :: [unquote(@mod).t]
defun valid?(list :: term) :: boolean do
l when is_list(l) -> Enum.all?(l, &@mod.valid?/1)
_ -> false
end
# Invoking `module_info/1` on `mod` automatically compiles and loads the module if necessary.
if {:new, 1} in @mod.module_info(:exports) do
defun new(list :: term) :: R.t(t) do
l when is_list(l) -> Enum.map(l, &@mod.new/1) |> R.sequence()
_ -> {:error, {:invalid_value, [__MODULE__]}}
end
defun new!(term :: term) :: t do
new(term) |> R.get!()
end
end
if Keyword.get(options, :define_default0?, true) do
defun default() :: t, do: []
end
end
end
@doc """
Creates a new module that represents a sum type of the given types.
The argument must be a list of type modules.
Note that the specified types should be mutually disjoint;
otherwise `new/1` can return unexpected results depending on the order of the type modules.
"""
defmacro union(modules) do
ms = Enum.map(modules, fn m -> Macro.expand(m, __CALLER__) end)
if Enum.empty?(ms), do: raise "Empty union is not allowed"
union_impl(ms, Macro.Env.location(__CALLER__))
end
defp union_impl(modules, location) do
module_body = Macro.escape(union_module_body(modules))
quote bind_quoted: [modules: modules, module_body: module_body, location: location] do
hash = Enum.map(modules, &Atom.to_string/1) |> :erlang.md5() |> Base.encode16()
name = Module.concat(Croma.TypeGen.Union, hash)
Croma.TypeGen.ensure_module_defined(name, module_body, location)
name
end
end
defp union_module_body(modules) do
quote bind_quoted: [modules: modules] do
@moduledoc false
@modules modules
@type t :: unquote(Enum.map(@modules, fn m -> quote do: unquote(m).t end) |> Croma.TypeUtil.list_to_type_union())
defun valid?(value :: term) :: boolean do
Enum.any?(@modules, fn mod -> mod.valid?(value) end)
end
module_flag_pairs = Enum.map(@modules, fn m -> {m, {:new, 1} in m.module_info(:exports)} end)
Enum.each(module_flag_pairs, fn {mod, has_new1} ->
if has_new1 do
defp call_new_or_validate(unquote(mod), v) do
unquote(mod).new(v)
end
else
defp call_new_or_validate(unquote(mod), v) do
Croma.Result.wrap_if_valid(v, unquote(mod))
end
end
end)
defun new(v :: term) :: R.t(t) do
new_impl(v, @modules) |> R.map_error(fn _ -> {:invalid_value, [__MODULE__]} end)
end
defp new_impl(v, [m]) do
call_new_or_validate(m, v)
end
defp new_impl(v, [m | ms]) do
require R
call_new_or_validate(m, v) |> R.or_else(new_impl(v, ms))
end
defun new!(term :: term) :: t do
new(term) |> R.get!()
end
end
end
@doc """
Creates a new module that simply represents a type whose sole member is the given value.
Only atoms and integers are supported.
"""
defmacro fixed(value) do
fixed_impl(value, Macro.Env.location(__CALLER__))
end
defp fixed_impl(value, location) when is_atom(value) or is_integer(value) do
module_body = Macro.escape(fixed_module_body(value))
quote bind_quoted: [value: value, module_body: module_body, location: location] do
hash = :erlang.term_to_binary(value) |> :erlang.md5() |> Base.encode16()
name = Module.concat(Croma.TypeGen.Fixed, hash)
Croma.TypeGen.ensure_module_defined(name, module_body, location)
name
end
end
defp fixed_module_body(value) do
quote bind_quoted: [value: value] do
@moduledoc false
@value value
@type t :: unquote(@value)
defun valid?(v :: term) :: boolean do
v == @value
end
defun default() :: t, do: @value
end
end
@doc false
def ensure_module_defined(name, quoted_expr, location) do
# Skip creating module if its beam file is already generated by previous compilation
if :code.which(name) == :non_existing do
# Use processes' registered names (just because it's easy) to remember whether already defined or not
# (Using `module_info` leads to try-rescue, which results in compilation error:
# see https://github.com/elixir-lang/elixir/issues/4055)
case Agent.start(fn -> nil end, [name: name]) do
{:ok , _pid } -> Module.create(name, quoted_expr, location)
{:error, _already_defined} -> nil
end
end
end
@doc false
def define_nilable_and_list_of(mod) do
location = Macro.Env.location(__ENV__)
q1 = nilable_impl(mod, location)
q2 = list_of_impl(mod, location, [define_default0?: true ])
q3 = list_of_impl(mod, location, [define_default0?: false])
Code.eval_quoted(q1, [], __ENV__)
Code.eval_quoted(q2, [], __ENV__)
Code.eval_quoted(q3, [], __ENV__)
end
end
# Predefine some type modules to avoid warnings when generated by multiple mix projects
defmodule Croma.PredefineVariantsOfBuiltinTypes do
@moduledoc false
Croma.BuiltinType.all() |> Enum.each(&Croma.TypeGen.define_nilable_and_list_of/1)
end
|
lib/croma/type_gen.ex
| 0.823648
| 0.455501
|
type_gen.ex
|
starcoder
|
defmodule Contentful.Delivery do
@moduledoc """
The `Contentful.Delivery` module offers functions to interact with the [Contentful Delivery API](https://www.contentful.com/developers/docs/references/content-delivery-api/) (CDA).
The API is _read only_. If you wish to manipulate data, have a look at the Management API.
"""
import HTTPoison, only: [get: 2]
alias HTTPoison.Response
@endpoint "cdn.contentful.com"
@protocol "https"
@separator "/"
@agent_header [
"User-Agent": "Contentful Elixir SDK"
]
@accept_header [
accept: "application/json"
]
@doc """
Gets the json library for the Contentful Delivery API based
on the config/config.exs.
"""
@spec json_library :: module()
def json_library do
Contentful.json_library()
end
@doc """
constructs the base url with protocol for the CDA
## Examples
"https://cdn.contentful.com" = url()
"""
@spec url() :: String.t()
def url do
"#{@protocol}://#{@endpoint}"
end
@doc """
constructs the base url with the space id that got configured in config.exs
"""
def url(space) when is_nil(space) do
case space_from_config() do
nil ->
url()
space ->
space |> url
end
end
@doc """
constructs the base url with the extension for a given space
## Examples
"https://cdn.contentful.com/spaces/foo" = url("foo")
"""
@spec url(String.t() | nil) :: String.t()
def url(space) do
[url(), "spaces", space] |> Enum.join(@separator)
end
@doc """
  When explicitly given `nil`, will fetch the `environment` from the application's
  current config (see `config/config.exs`). Will fall back to `"master"` if no
  environment is set.
## Examples
"https://cdn.contentful.com/spaces/foo/environments/master" = url("foo", nil)
# With config set in config/config.exs
config :contentful_delivery, environment: "staging"
"https://cdn.contentful.com/spaces/foo/environments/staging" = url("foo", nil)
"""
@spec url(String.t(), nil) :: String.t()
def url(space, env) when is_nil(env) do
[space |> url(), "environments", environment_from_config()]
|> Enum.join(@separator)
end
@doc """
constructs the base url for the delivery endpoint for a given space and environment
## Examples
"https://cdn.contentful.com/spaces/foo/environments/bar" = url("foo", "bar")
"""
def url(space, env) do
[space |> url(), "environments", env] |> Enum.join(@separator)
end
@doc """
  Builds the request headers for a request against the CDA, taking API access tokens into account
## Examples
my_access_token = "<PASSWORD>"
[
"Authorization": "Bearer <PASSWORD>",
"User-Agent": "Contentful Elixir SDK",
"Accept": "application/json"
] = my_access_token |> request_headers()
"""
@spec request_headers(String.t()) :: keyword()
def request_headers(api_key) do
api_key
|> authorization_header()
|> Keyword.merge(@agent_header)
|> Keyword.merge(@accept_header)
end
@doc """
Sends a request against the CDA. It's really just a wrapper around HTTPoison.get/2
"""
@spec send_request(tuple()) :: {:ok, Response.t()}
def send_request({url, headers}) do
get(url, headers)
end
@doc """
Prevents parsing of empty options.
## Examples
"" = collection_query_params([])
"""
def collection_query_params([]) do
""
end
@doc """
  parses the options for retrieving a collection. It will drop any option that is
  not one of the supported collection filters (`:limit` and `:skip`).
## Examples
"?limit=50&skip=25&order=foobar"
= collection_query_params(limit: 50, baz: "foo", skip: 25, order: "foobar", bar: 42)
"""
@spec collection_query_params(limit: pos_integer(), skip: non_neg_integer()) :: String.t()
def collection_query_params(options) do
params =
options
|> Keyword.take([:limit, :skip])
|> URI.encode_query()
"?#{params}"
end
@doc """
Parses the response from the CDA and triggers a callback on success
"""
@spec parse_response({:ok, Response.t()}, fun()) ::
{:ok, struct()}
| {:ok, list(struct()), total: non_neg_integer()}
| {:error, :rate_limit_exceeded, wait_for: integer()}
| {:error, atom(), original_message: String.t()}
def parse_response(
{:ok, %Response{status_code: code, body: body} = resp},
callback
) do
case code do
200 ->
body |> json_library().decode! |> callback.()
401 ->
body |> build_error(:unauthorized)
404 ->
body |> build_error(:not_found)
_ ->
resp |> build_error()
end
end
@doc """
catch_all for any errors during flight (connection loss, etc.)
"""
@spec parse_response({:error, any()}, fun()) :: {:error, :unknown}
def parse_response({:error, _}, _callback) do
build_error()
end
@doc """
Used to construct generic errors for calls against the CDA
"""
@spec build_error(String.t(), atom()) ::
{:error, atom(), original_message: String.t()}
def build_error(response_body, status) do
{:ok, %{"message" => message}} = response_body |> json_library().decode()
{:error, status, original_message: message}
end
@doc """
Used for the rate limit exceeded error, as it gives the user extra information on wait times
"""
@spec build_error(Response.t()) ::
{:error, :rate_limit_exceeded, wait_for: integer()}
def build_error(%Response{
status_code: 429,
headers: [{"x-contentful-rate-limit-exceeded", seconds}, _]
}) do
{:error, :rate_limit_exceeded, wait_for: seconds}
end
@doc """
Used to make a generic error, in case the API Response is not what is expected
"""
@spec build_error() :: {:error, :unknown}
def build_error do
{:error, :unknown}
end
defp authorization_header(token) when is_nil(token) do
api_key_from_configuration() |> authorization_header()
end
defp authorization_header(token) do
[authorization: "Bearer #{token}"]
end
defp api_key_from_configuration do
config(:api_key, "")
end
defp environment_from_config do
config(:environment, "master")
end
defp space_from_config do
config(:space, nil)
end
@doc """
Can be used to retrieve configuration for the `Contentful.Delivery` module
## Examples
config :contentful, delivery: [
my_config: "foobar"
]
"foobar" = Contentful.Delivery.config(:my_config)
"""
@spec config(atom(), any() | nil) :: any()
def config(setting, default \\ nil) do
config() |> Keyword.get(setting, default)
end
@doc """
loads the configuration for the delivery module from the contentful app configuration
"""
  @spec config() :: keyword()
def config do
Application.get_env(:contentful, :delivery, [])
end
end
|
lib/contentful_delivery/delivery.ex
| 0.875521
| 0.53048
|
delivery.ex
|
starcoder
|
defmodule Fiet.RSS2 do
@moduledoc """
RSS 2.0 parser, comply with [RSS 2.0 at Harvard Law](http://cyber.harvard.edu/rss/rss.html).
"""
use Fiet.RSS2.Engine
@doc """
Parses RSS 2.0 XML document.
## Example
iex> rss2 = File.read!("/path/to/rss2.xml")
iex> Fiet.RSS2.parse(rss2)
{:ok,
%Fiet.RSS2.Channel{
categories: [
%Fiet.RSS2.Category{domain: "https://example.com/categories/science", value: "Science"},
%Fiet.RSS2.Category{domain: nil, value: "Space"}
],
cloud: %Fiet.RSS2.Channel.Cloud{
domain: "rpc.sys.com",
path: "/RPC2",
port: "80",
protocol: "xml-rpc",
register_procedure: nil
},
copyright: nil,
description: "Liftoff to Space Exploration.",
docs: "http://blogs.law.harvard.edu/tech/rss",
extras: %{},
generator: "Weblog Editor 2.0",
image: %Fiet.RSS2.Image{
description: "The logo of Liftoff News",
height: "50",
link: "https://www.liftoff.msfc.nasa.gov/",
title: "Liftoff News Logo",
url: "https://www.example.com/images/logo.png",
width: "50"
},
items: [
%Fiet.RSS2.Item{
author: nil,
          categories: [%Fiet.RSS2.Category{domain: nil, value: "Space"}],
comments: nil,
description: "How do Americans get ready to work with Russians aboard the International Space Station? They take a crash course in culture, language and protocol at Russia's <a href=\"http://howe.iki.rssi.ru/GCTC/gctc_e.htm\">Star City</a>.",
enclosure: %Fiet.RSS2.Item.Enclosure{
length: "78645",
type: "video/wmv",
url: "https://www.w3schools.com/media/3d.wmv"
},
extras: %{},
guid: "http://liftoff.msfc.nasa.gov/2003/06/03.html#item573",
link: "http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp",
pub_date: "Tue, 03 Jun 2003 09:39:21 GMT",
source: nil,
title: "Star City"
        }
      ]
    }
  }
"""
@spec parse(document :: binary) :: {:ok, channel :: Fiet.RSS2.Channel.t()}
def parse(document)
end
|
lib/fiet/rss2.ex
| 0.720663
| 0.420659
|
rss2.ex
|
starcoder
|
defmodule Sidewalk do
@moduledoc ~S"""
Sidewalk is an Elixir client which is compatible with Sidekiq, the »efficient background processing library for Ruby«.
It can be used to enqueue jobs for later processing alongside e.g. with an already existing Ruby application.
For more information about Sidekiq please refer to http://sidekiq.org.
## Supported features
* Redis namespaces as already known with Sidekiq
* Ability to configure the Redis server connection details
  * Ability to configure the Redis pool size
* Enqueuing jobs to be executed immediately
* Enqueuing jobs to be executed in X seconds
* Enqueuing jobs to be executed at a specific time
## Configuration example
config :sidewalk,
host: "localhost",
port: 6379,
password: "<PASSWORD>",
namespace: "your_namespace",
database: 0,
pool_size: 10
## Adding sidewalk to your applications
def application do
[applications: [:sidewalk],
mod: {YourApplication, []}]
end
To use Sidewalk you need to create a `%Sidewalk.Job{}` and enqueue it with one of the enqueue functions.
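
  ## Enqueuing example

      # A minimal sketch. The field names mirror Sidekiq's job payload and
      # `Sidewalk.Client.enqueue/1` is assumed to be the enqueue entry point:
      job = %Sidewalk.Job{class: "HardWorker", args: [1, 2, 3], queue: "default"}
      {:ok, _jid} = Sidewalk.Client.enqueue(job)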
"""
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
:poolboy.child_spec(:sidewalk_pool, pool_options(), redix_options())
]
opts = [strategy: :one_for_one, name: Sidewalk.Supervisor]
Supervisor.start_link(children, opts)
end
defp pool_options do
[
name: {:local, :sidewalk_pool},
worker_module: Redix,
size: Application.get_env(:sidewalk, :pool_size, 5),
max_overflow: 0
]
end
def redix_options do
redix_default_options()
|> redix_options_from_environment()
end
defp redix_default_options do
[
host: Application.get_env(:sidewalk, :host, "localhost"),
port: Application.get_env(:sidewalk, :port, 6379),
password: Application.get_env(:sidewalk, :password),
database: Application.get_env(:sidewalk, :database, 0)
]
end
defp redix_options_from_environment(options) do
Enum.map(options, fn {key, value} ->
case value do
{:system, env} ->
{key, System.get_env(env)}
value ->
{key, value}
end
end)
end
end
|
lib/sidewalk.ex
| 0.604399
| 0.409486
|
sidewalk.ex
|
starcoder
|
defmodule Explorer.DataFrame do
@moduledoc """
The DataFrame struct and API.
"""
alias __MODULE__, as: DataFrame
alias Explorer.Series
import Explorer.Shared, only: [impl!: 1]
import Nx.Defn.Kernel, only: [keyword!: 2]
@type data :: Explorer.Backend.DataFrame.t()
@type t :: %DataFrame{data: data}
@enforce_keys [:data, :groups]
defstruct [:data, :groups]
# Access
@behaviour Access
@impl true
def fetch(df, columns) when is_list(columns) do
{:ok, select(df, columns)}
end
@impl true
def fetch(df, column) when is_binary(column) do
{:ok, pull(df, column)}
end
@impl true
def pop(df, column) when is_binary(column) do
{pull(df, column), select(df, [column], :drop)}
end
def pop(df, columns) when is_list(columns) do
{select(df, columns), select(df, columns, :drop)}
end
@impl true
def get_and_update(df, column, fun) when is_binary(column) do
value = pull(df, column)
{current_value, new_value} = fun.(value)
new_data = mutate(df, [{String.to_atom(column), new_value}])
{current_value, new_data}
end
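
  # Access behaviour usage sketch (column names are illustrative):
  #
  #     df["a"]                              # fetch/2 -> pull/2 (a series)
  #     df[["a", "b"]]                       # fetch/2 -> select/2 (a dataframe)
  #     {series, rest} = Access.pop(df, "a") # pop/2 splits a column off the frame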
# IO
@doc """
Reads a delimited file into a dataframe.
## Options
* `delimiter` - A single character used to separate fields within a record. (default: `","`)
* `dtypes` - A keyword list of `[column_name: dtype]`. If `nil`, dtypes are imputed from the first 1000 rows. (default: `nil`)
* `header?` - Does the file have a header of column names as the first row or not? (default: `true`)
* `max_rows` - Maximum number of lines to read. (default: `Inf`)
* `names` - A list of column names. Must match the width of the dataframe. (default: nil)
* `null_character` - The string that should be interpreted as a nil value. (default: `"NA"`)
* `skip_rows` - The number of lines to skip at the beginning of the file. (default: `0`)
    * `with_columns` - A list of column names to keep. If present, only these
      columns are read into the dataframe. (default: `nil`)
"""
@spec read_csv(filename :: String.t(), opts :: Keyword.t()) ::
{:ok, DataFrame.t()} | {:error, term()}
def read_csv(filename, opts \\ []) do
opts =
keyword!(opts,
delimiter: ",",
dtypes: nil,
encoding: "utf8",
header?: true,
max_rows: Inf,
names: nil,
null_character: "NA",
skip_rows: 0,
with_columns: nil
)
backend = backend_from_options!(opts)
backend.read_csv(
filename,
opts[:names],
opts[:dtypes],
opts[:delimiter],
opts[:null_character],
opts[:skip_rows],
opts[:header?],
opts[:encoding],
opts[:max_rows],
opts[:with_columns]
)
end
@doc """
Similar to `read_csv/2` but raises if there is a problem reading the CSV.
"""
@spec read_csv!(filename :: String.t(), opts :: Keyword.t()) :: DataFrame.t()
def read_csv!(filename, opts \\ []) do
case read_csv(filename, opts) do
{:ok, df} -> df
{:error, error} -> raise "#{error}"
end
end
@doc """
Writes a dataframe to a delimited file.
## Options
* `header?` - Should the column names be written as the first line of the file? (default: `true`)
* `delimiter` - A single character used to separate fields within a record. (default: `","`)
"""
@spec write_csv(df :: DataFrame.t(), filename :: String.t(), opts :: Keyword.t()) ::
{:ok, String.t()} | {:error, term()}
def write_csv(df, filename, opts \\ []) do
opts = keyword!(opts, header?: true, delimiter: ",")
apply_impl(df, :write_csv, [filename, opts[:header?], opts[:delimiter]])
end
@doc """
  Similar to `write_csv/3` but raises if there is a problem writing the CSV.
"""
@spec write_csv!(df :: DataFrame.t(), filename :: String.t(), opts :: Keyword.t()) :: String.t()
def write_csv!(df, filename, opts \\ []) do
case write_csv(df, filename, opts) do
{:ok, filename} -> filename
{:error, error} -> raise "#{error}"
end
end
@doc """
Creates a new dataframe from a map of lists.
Lists must be the same length. This function calls `Explorer.Series.from_list/2` so lists must
conform to the requirements for making a series.
## Options
* `backend` - The Explorer backend to use. Defaults to the value returned by `Explorer.default_backend/0`.
## Examples
iex> Explorer.DataFrame.from_map(%{floats: [1.0, 2.0], ints: [1, nil]})
#Explorer.DataFrame<
[rows: 2, columns: 2]
floats float [1.0, 2.0]
ints integer [1, nil]
>
"""
@spec from_map(map :: map(), opts :: Keyword.t()) :: DataFrame.t()
def from_map(map, opts \\ []) do
backend = backend_from_options!(opts)
backend.from_map(map)
end
@doc """
Converts a dataframe to a map.
By default, the constituent series of the dataframe are converted to Elixir lists.
## Examples
iex> df = Explorer.DataFrame.from_map(%{floats: [1.0, 2.0], ints: [1, nil]})
iex> Explorer.DataFrame.to_map(df)
%{floats: [1.0, 2.0], ints: [1, nil]}
"""
@spec to_map(df :: DataFrame.t(), convert_series? :: boolean()) :: map()
def to_map(df, convert_series? \\ true), do: apply_impl(df, :to_map, [convert_series?])
@doc """
Writes a dataframe to a binary representation of a delimited file.
## Options
* `header?` - Should the column names be written as the first line of the file? (default: `true`)
* `delimiter` - A single character used to separate fields within a record. (default: `","`)
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> df |> Explorer.DataFrame.head() |> Explorer.DataFrame.to_binary()
"year,country,total,solid_fuel,liquid_fuel,gas_fuel,cement,gas_flaring,per_capita,bunker_fuels\\n2010,AFGHANISTAN,2308,627,1601,74,5,0,0.08,9\\n2010,ALBANIA,1254,117,953,7,177,0,0.43,7\\n2010,ALGERIA,32500,332,12381,14565,2598,2623,0.9,663\\n2010,ANDORRA,141,0,141,0,0,0,1.68,0\\n2010,ANGOLA,7924,0,3649,374,204,3697,0.37,321\\n"
"""
@spec to_binary(df :: DataFrame.t(), opts :: Keyword.t()) :: String.t()
def to_binary(df, opts \\ []) do
opts = keyword!(opts, header?: true, delimiter: ",")
apply_impl(df, :to_binary, [opts[:header?], opts[:delimiter]])
end
# Introspection
@doc """
Gets the names of the dataframe columns.
## Examples
iex> df = Explorer.DataFrame.from_map(%{floats: [1.0, 2.0], ints: [1, 2]})
iex> Explorer.DataFrame.names(df)
["floats", "ints"]
"""
@spec names(df :: DataFrame.t()) :: [String.t()]
def names(df), do: apply_impl(df, :names)
@doc """
Gets the dtypes of the dataframe columns.
## Examples
iex> df = Explorer.DataFrame.from_map(%{floats: [1.0, 2.0], ints: [1, 2]})
iex> Explorer.DataFrame.dtypes(df)
[:float, :integer]
"""
@spec dtypes(df :: DataFrame.t()) :: [atom()]
def dtypes(df), do: apply_impl(df, :dtypes)
@doc """
Gets the shape of the dataframe as a `{height, width}` tuple.
## Examples
iex> df = Explorer.DataFrame.from_map(%{floats: [1.0, 2.0, 3.0], ints: [1, 2, 3]})
iex> Explorer.DataFrame.shape(df)
{3, 2}
"""
@spec shape(df :: DataFrame.t()) :: {integer(), integer()}
def shape(df), do: apply_impl(df, :shape)
@doc """
Returns the number of rows in the dataframe.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.n_rows(df)
1094
"""
@spec n_rows(df :: DataFrame.t()) :: integer()
def n_rows(df), do: apply_impl(df, :n_rows)
@doc """
Returns the number of columns in the dataframe.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.n_cols(df)
10
"""
@spec n_cols(df :: DataFrame.t()) :: integer()
def n_cols(df), do: apply_impl(df, :n_cols)
@doc """
Returns the groups of a dataframe.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> df = Explorer.DataFrame.group_by(df, "country")
iex> Explorer.DataFrame.groups(df)
["country"]
"""
@spec groups(df :: DataFrame.t()) :: list(String.t())
def groups(%DataFrame{groups: groups}), do: groups
# Single table verbs
@doc """
Returns the first *n* rows of the dataframe.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.head(df)
#Explorer.DataFrame<
[rows: 5, columns: 10]
year integer [2010, 2010, 2010, 2010, 2010]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA"]
total integer [2308, 1254, 32500, 141, 7924]
solid_fuel integer [627, 117, 332, 0, 0]
liquid_fuel integer [1601, 953, 12381, 141, 3649]
gas_fuel integer [74, 7, 14565, 0, 374]
cement integer [5, 177, 2598, 0, 204]
gas_flaring integer [0, 0, 2623, 0, 3697]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37]
bunker_fuels integer [9, 7, 663, 0, 321]
>
"""
@spec head(df :: DataFrame.t(), nrows :: integer()) :: DataFrame.t()
def head(df, nrows \\ 5), do: apply_impl(df, :head, [nrows])
@doc """
Returns the last *n* rows of the dataframe.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.tail(df)
#Explorer.DataFrame<
[rows: 5, columns: 10]
year integer [2014, 2014, 2014, 2014, 2014]
country string ["VIET NAM", "WALLIS AND FUTUNA ISLANDS", "YEMEN", "ZAMBIA", "ZIMBABWE"]
total integer [45517, 6, 6190, 1228, 3278]
solid_fuel integer [19246, 0, 137, 132, 2097]
liquid_fuel integer [12694, 6, 5090, 797, 1005]
gas_fuel integer [5349, 0, 581, 0, 0]
cement integer [8229, 0, 381, 299, 177]
gas_flaring integer [0, 0, 0, 0, 0]
per_capita float [0.49, 0.44, 0.24, 0.08, 0.22]
bunker_fuels integer [761, 1, 153, 33, 9]
>
"""
@spec tail(df :: DataFrame.t(), nrows :: integer()) :: DataFrame.t()
def tail(df, nrows \\ 5), do: apply_impl(df, :tail, [nrows])
@doc """
Selects a subset of columns by name.
Can optionally return all *but* the named columns if `:drop` is passed as the last argument.
## Examples
You can select columns with a list of names:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.select(df, ["a"])
#Explorer.DataFrame<
[rows: 3, columns: 1]
a string ["a", "b", "c"]
>
Or you can use a callback function that takes the dataframe's names as its first argument:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.select(df, &String.starts_with?(&1, "b"))
#Explorer.DataFrame<
[rows: 3, columns: 1]
b integer [1, 2, 3]
>
If you pass `:drop` as the third argument, it will return all but the named columns:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.select(df, ["b"], :drop)
#Explorer.DataFrame<
[rows: 3, columns: 1]
a string ["a", "b", "c"]
>
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3], c: [4, 5, 6]})
iex> Explorer.DataFrame.select(df, ["a", "b"], :drop)
#Explorer.DataFrame<
[rows: 3, columns: 1]
c integer [4, 5, 6]
>
"""
@spec select(
df :: DataFrame.t(),
columns :: [String.t()],
keep_or_drop ::
:keep | :drop
) :: DataFrame.t()
def select(df, columns, keep_or_drop \\ :keep)
def select(df, columns, keep_or_drop) when is_list(columns) do
column_names = names(df)
Enum.each(columns, fn name ->
maybe_raise_column_not_found(column_names, name)
end)
apply_impl(df, :select, [columns, keep_or_drop])
end
@spec select(
df :: DataFrame.t(),
callback :: function(),
keep_or_drop ::
:keep | :drop
) :: DataFrame.t()
def select(df, callback, keep_or_drop) when is_function(callback),
do: df |> names() |> Enum.filter(callback) |> then(&select(df, &1, keep_or_drop))
@doc """
Subset rows using column values.
## Examples
You can pass a mask directly:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.filter(df, Explorer.Series.greater(df["b"], 1))
#Explorer.DataFrame<
[rows: 2, columns: 2]
a string ["b", "c"]
b integer [2, 3]
>
Including a list:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.filter(df, [false, true, false])
#Explorer.DataFrame<
[rows: 1, columns: 2]
a string ["b"]
b integer [2]
>
Or you can invoke a callback on the dataframe:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.filter(df, &Explorer.Series.greater(&1["b"], 1))
#Explorer.DataFrame<
[rows: 2, columns: 2]
a string ["b", "c"]
b integer [2, 3]
>
"""
@spec filter(df :: DataFrame.t(), mask :: Series.t() | [boolean()]) :: DataFrame.t()
def filter(df, %Series{} = mask) do
s_len = Series.length(mask)
df_len = n_rows(df)
case s_len == df_len do
false ->
raise(ArgumentError,
message:
"Length of the mask (#{s_len}) must match number of rows in the dataframe (#{df_len})."
)
true ->
apply_impl(df, :filter, [mask])
end
end
def filter(df, mask) when is_list(mask), do: mask |> Series.from_list() |> then(&filter(df, &1))
@spec filter(df :: DataFrame.t(), callback :: function()) :: DataFrame.t()
  def filter(df, callback) when is_function(callback),
    do: df |> callback.() |> then(&filter(df, &1))
@doc """
Creates and modifies columns.
Columns are added as keyword list arguments. New variables overwrite existing variables of the
same name. Column names are coerced from atoms to strings.
## Examples
You can pass in a list directly as a new column:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, c: [4, 5, 6])
#Explorer.DataFrame<
[rows: 3, columns: 3]
a string ["a", "b", "c"]
b integer [1, 2, 3]
c integer [4, 5, 6]
>
Or you can pass in a series:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> s = Explorer.Series.from_list([4, 5, 6])
iex> Explorer.DataFrame.mutate(df, c: s)
#Explorer.DataFrame<
[rows: 3, columns: 3]
a string ["a", "b", "c"]
b integer [1, 2, 3]
c integer [4, 5, 6]
>
Or you can invoke a callback on the dataframe:
iex> df = Explorer.DataFrame.from_map(%{a: [4, 5, 6], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, c: &Explorer.Series.add(&1["a"], &1["b"]))
#Explorer.DataFrame<
[rows: 3, columns: 3]
a integer [4, 5, 6]
b integer [1, 2, 3]
c integer [5, 7, 9]
>
You can overwrite existing columns:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, a: [4, 5, 6])
#Explorer.DataFrame<
[rows: 3, columns: 2]
a integer [4, 5, 6]
b integer [1, 2, 3]
>
Scalar values are repeated to fill the series:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, a: 4)
#Explorer.DataFrame<
[rows: 3, columns: 2]
a integer [4, 4, 4]
b integer [1, 2, 3]
>
Including when a callback returns a scalar:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, a: &Explorer.Series.max(&1["b"]))
#Explorer.DataFrame<
[rows: 3, columns: 2]
a integer [3, 3, 3]
b integer [1, 2, 3]
>
Alternatively, all of the above works with a map instead of a keyword list:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "c"], b: [1, 2, 3]})
iex> Explorer.DataFrame.mutate(df, %{"c" => [4, 5, 6]})
#Explorer.DataFrame<
[rows: 3, columns: 3]
a string ["a", "b", "c"]
b integer [1, 2, 3]
c integer [4, 5, 6]
>
"""
@spec mutate(df :: DataFrame.t(), with_columns :: map() | Keyword.t()) :: DataFrame.t()
def mutate(df, with_columns) when is_map(with_columns) do
with_columns = Enum.reduce(with_columns, %{}, &mutate_reducer(&1, &2, df))
apply_impl(df, :mutate, [with_columns])
end
def mutate(df, with_columns) when is_list(with_columns) do
    if not Keyword.keyword?(with_columns),
      do: raise(ArgumentError, message: "Expected second argument to be a keyword list.")
with_columns
|> Enum.reduce(%{}, fn {colname, value}, acc ->
Map.put(acc, Atom.to_string(colname), value)
end)
|> then(&mutate(df, &1))
end
defp mutate_reducer({colname, value}, acc, %DataFrame{} = _df)
when is_binary(colname) and is_map(acc),
do: Map.put(acc, colname, value)
defp mutate_reducer({colname, value}, acc, %DataFrame{} = df)
when is_atom(colname) and is_map(acc),
do: mutate_reducer({Atom.to_string(colname), value}, acc, df)
@doc """
Arranges/sorts rows by columns.
## Examples
A single column name will sort ascending by that column:
iex> df = Explorer.DataFrame.from_map(%{a: ["b", "c", "a"], b: [1, 2, 3]})
iex> Explorer.DataFrame.arrange(df, "a")
#Explorer.DataFrame<
[rows: 3, columns: 2]
a string ["a", "b", "c"]
b integer [3, 1, 2]
>
You can also sort descending:
iex> df = Explorer.DataFrame.from_map(%{a: ["b", "c", "a"], b: [1, 2, 3]})
iex> Explorer.DataFrame.arrange(df, desc: "a")
#Explorer.DataFrame<
[rows: 3, columns: 2]
a string ["c", "b", "a"]
b integer [2, 1, 3]
>
Sorting by more than one column sorts them in the order they are entered:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.arrange(df, asc: "total", desc: "country")
#Explorer.DataFrame<
[rows: 1094, columns: 10]
year integer [2010, 2012, 2011, 2013, 2014, "..."]
country string ["ZIMBABWE", "ZIMBABWE", "ZIMBABWE", "ZIMBABWE", "ZIMBABWE", "..."]
total integer [2121, 2125, 2608, 3184, 3278, "..."]
solid_fuel integer [1531, 917, 1584, 1902, 2097, "..."]
liquid_fuel integer [481, 1006, 888, 1119, 1005, "..."]
gas_fuel integer [0, 0, 0, 0, 0, "..."]
cement integer [109, 201, 136, 162, 177, "..."]
gas_flaring integer [0, 0, 0, 0, 0, "..."]
per_capita float [0.15, 0.15, 0.18, 0.21, 0.22, "..."]
bunker_fuels integer [7, 9, 8, 9, 9, "..."]
>
"""
@spec arrange(
df :: DataFrame.t(),
columns ::
String.t() | [String.t() | {:asc | :desc, String.t()}]
) :: DataFrame.t()
def arrange(df, columns) when is_list(columns) do
columns = columns |> Enum.reduce([], &arrange_reducer/2) |> Enum.reverse()
column_names = names(df)
Enum.each(columns, fn {_dir, name} ->
maybe_raise_column_not_found(column_names, name)
end)
apply_impl(df, :arrange, [columns])
end
def arrange(df, column) when is_binary(column), do: arrange(df, [column])
defp arrange_reducer({dir, name}, acc)
when is_binary(name) and is_list(acc) and dir in [:asc, :desc],
do: [{dir, name} | acc]
defp arrange_reducer({dir, name}, acc)
when is_atom(name) and is_list(acc) and dir in [:asc, :desc],
do: arrange_reducer({dir, Atom.to_string(name)}, acc)
defp arrange_reducer(name, acc)
when is_binary(name) and is_list(acc),
do: arrange_reducer({:asc, name}, acc)
defp arrange_reducer(name, acc)
when is_atom(name) and is_list(acc),
do: arrange_reducer(Atom.to_string(name), acc)
@doc """
Takes distinct rows by a selection of columns.
## Examples
By default will return unique values of the requested columns:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.distinct(df, columns: ["year", "country"])
#Explorer.DataFrame<
[rows: 1094, columns: 2]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
>
If `keep_all?` is set to `true`, then the first value of each column not in the requested
columns will be returned:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.distinct(df, columns: ["year", "country"], keep_all?: true)
#Explorer.DataFrame<
[rows: 1094, columns: 10]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
total integer [2308, 1254, 32500, 141, 7924, "..."]
solid_fuel integer [627, 117, 332, 0, 0, "..."]
liquid_fuel integer [1601, 953, 12381, 141, 3649, "..."]
gas_fuel integer [74, 7, 14565, 0, 374, "..."]
cement integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
A callback on the dataframe's names can be passed instead of a list (like `select/3`):
iex> df = Explorer.DataFrame.from_map(%{x1: [1, 3, 3], x2: ["a", "c", "c"], y1: [1, 2, 3]})
iex> Explorer.DataFrame.distinct(df, columns: &String.starts_with?(&1, "x"))
#Explorer.DataFrame<
[rows: 2, columns: 2]
x1 integer [1, 3]
x2 string ["a", "c"]
>
"""
@spec distinct(df :: DataFrame.t(), opts :: Keyword.t()) :: DataFrame.t()
def distinct(df, opts \\ [])
def distinct(df, opts) do
opts = keyword!(opts, columns: [], keep_all?: false)
column_names = names(df)
columns =
case opts[:columns] do
[] -> column_names
callback when is_function(callback) -> Enum.filter(column_names, callback)
columns -> columns
end
Enum.each(columns, fn name ->
maybe_raise_column_not_found(column_names, name)
end)
apply_impl(df, :distinct, [columns, opts[:keep_all?]])
end
@doc """
Drop nil values.
Optionally accepts a subset of columns.
## Examples
iex> df = Explorer.DataFrame.from_map(%{a: [1, 2, nil], b: [1, nil, 3]})
iex> Explorer.DataFrame.drop_nil(df)
#Explorer.DataFrame<
[rows: 1, columns: 2]
a integer [1]
b integer [1]
>
"""
@spec drop_nil(df :: DataFrame.t(), columns_or_column :: [String.t()] | String.t()) ::
DataFrame.t()
def drop_nil(df, columns_or_column \\ [])
def drop_nil(df, []), do: drop_nil(df, names(df))
def drop_nil(df, column) when is_binary(column), do: drop_nil(df, [column])
def drop_nil(df, columns) when is_list(columns) do
column_names = names(df)
Enum.each(columns, fn name ->
maybe_raise_column_not_found(column_names, name)
end)
apply_impl(df, :drop_nil, [columns])
end
@doc """
Renames columns.
To apply a function to a subset of columns, see `rename_with/3`.
## Examples
You can pass in a list of new names:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "a"], b: [1, 3, 1]})
iex> Explorer.DataFrame.rename(df, ["c", "d"])
#Explorer.DataFrame<
[rows: 3, columns: 2]
c string ["a", "b", "a"]
d integer [1, 3, 1]
>
Or you can rename individual columns using keyword args:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "a"], b: [1, 3, 1]})
iex> Explorer.DataFrame.rename(df, a: "first")
#Explorer.DataFrame<
[rows: 3, columns: 2]
first string ["a", "b", "a"]
b integer [1, 3, 1]
>
Or you can rename individual columns using a map:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "a"], b: [1, 3, 1]})
iex> Explorer.DataFrame.rename(df, %{"a" => "first"})
#Explorer.DataFrame<
[rows: 3, columns: 2]
first string ["a", "b", "a"]
b integer [1, 3, 1]
>
Or if you want to use a function:
iex> df = Explorer.DataFrame.from_map(%{a: ["a", "b", "a"], b: [1, 3, 1]})
iex> Explorer.DataFrame.rename(df, &(&1 <> "_test"))
#Explorer.DataFrame<
[rows: 3, columns: 2]
a_test string ["a", "b", "a"]
b_test integer [1, 3, 1]
>
"""
@spec rename(df :: DataFrame.t(), names :: [String.t() | atom()] | map()) :: DataFrame.t()
def rename(df, names) when is_list(names) do
case Keyword.keyword?(names) do
false ->
names =
Enum.map(names, fn
name when is_atom(name) -> Atom.to_string(name)
name -> name
end)
check_new_names_length(df, names)
apply_impl(df, :rename, [names])
true ->
names
|> Enum.reduce(%{}, &rename_reducer/2)
|> then(&rename(df, &1))
end
end
def rename(df, names) when is_map(names) do
names = Enum.reduce(names, %{}, &rename_reducer/2)
name_keys = Map.keys(names)
old_names = names(df)
Enum.each(name_keys, fn name ->
maybe_raise_column_not_found(old_names, name)
end)
old_names
|> Enum.map(fn name -> if name in name_keys, do: Map.get(names, name), else: name end)
|> then(&rename(df, &1))
end
def rename(df, names) when is_function(names),
do: df |> names() |> Enum.map(names) |> then(&rename(df, &1))
defp rename_reducer({k, v}, acc) when is_atom(k) and is_binary(v),
do: Map.put(acc, Atom.to_string(k), v)
defp rename_reducer({k, v}, acc) when is_binary(k) and is_binary(v), do: Map.put(acc, k, v)
defp rename_reducer({k, v}, acc) when is_atom(k) and is_atom(v),
do: Map.put(acc, Atom.to_string(k), Atom.to_string(v))
defp rename_reducer({k, v}, acc) when is_binary(k) and is_atom(v),
do: Map.put(acc, k, Atom.to_string(v))
defp check_new_names_length(df, names) do
width = n_cols(df)
n_new_names = length(names)
if width != n_new_names,
do:
raise(ArgumentError,
message:
"List of new names must match the number of columns in the dataframe. " <>
"Found #{n_new_names} new name(s), but the supplied dataframe has #{width} " <>
"column(s)."
)
end
@doc """
Renames columns with a function.
## Examples
If no columns are specified, it will apply the function to all column names:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.rename_with(df, &String.upcase/1)
#Explorer.DataFrame<
[rows: 1094, columns: 10]
YEAR integer [2010, 2010, 2010, 2010, 2010, "..."]
COUNTRY string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
TOTAL integer [2308, 1254, 32500, 141, 7924, "..."]
SOLID_FUEL integer [627, 117, 332, 0, 0, "..."]
LIQUID_FUEL integer [1601, 953, 12381, 141, 3649, "..."]
GAS_FUEL integer [74, 7, 14565, 0, 374, "..."]
CEMENT integer [5, 177, 2598, 0, 204, "..."]
GAS_FLARING integer [0, 0, 2623, 0, 3697, "..."]
PER_CAPITA float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
BUNKER_FUELS integer [9, 7, 663, 0, 321, "..."]
>
A callback can be used to filter the column names that will be renamed, similarly to `select/3`:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.rename_with(df, &String.trim_trailing(&1, "_fuel"), &String.ends_with?(&1, "_fuel"))
#Explorer.DataFrame<
[rows: 1094, columns: 10]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
total integer [2308, 1254, 32500, 141, 7924, "..."]
solid integer [627, 117, 332, 0, 0, "..."]
liquid integer [1601, 953, 12381, 141, 3649, "..."]
gas integer [74, 7, 14565, 0, 374, "..."]
cement integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
Or you can just pass in the list of column names you'd like to apply the function to:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.rename_with(df, &String.upcase/1, ["total", "cement"])
#Explorer.DataFrame<
[rows: 1094, columns: 10]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
TOTAL integer [2308, 1254, 32500, 141, 7924, "..."]
solid_fuel integer [627, 117, 332, 0, 0, "..."]
liquid_fuel integer [1601, 953, 12381, 141, 3649, "..."]
gas_fuel integer [74, 7, 14565, 0, 374, "..."]
CEMENT integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
"""
@spec rename_with(df :: DataFrame.t(), callback :: function(), columns :: list() | function()) ::
DataFrame.t()
def rename_with(df, callback, columns \\ [])
def rename_with(df, callback, []) when is_function(callback),
do: df |> names() |> Enum.map(callback) |> then(&rename(df, &1))
def rename_with(df, callback, columns) when is_function(callback) and is_list(columns) do
old_names = names(df)
Enum.each(columns, fn name ->
maybe_raise_column_not_found(old_names, name)
end)
old_names
|> Enum.map(fn name -> if name in columns, do: callback.(name), else: name end)
|> then(&rename(df, &1))
end
def rename_with(df, callback, columns) when is_function(callback) and is_function(columns) do
case df |> names() |> Enum.filter(columns) do
[_ | _] = columns ->
rename_with(df, callback, columns)
[] ->
raise ArgumentError, message: "Function to select column names did not return any names."
end
end
@doc """
Turns a set of columns to dummy variables.
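## Examples
A minimal sketch (not a doctest; the exact indicator column names depend on the backend):
df = Explorer.DataFrame.from_map(%{col: ["a", "b", "a"]})
Explorer.DataFrame.dummies(df, ["col"])
# Produces one binary indicator column per unique value, e.g.
# col_a integer [1, 0, 1]
# col_b integer [0, 1, 0]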
"""
def dummies(df, columns), do: apply_impl(df, :dummies, [columns])
@doc """
Extracts a single column as a series.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.pull(df, "total")
#Explorer.Series<
integer[1094]
[2308, 1254, 32500, 141, 7924, 41, 143, 51246, 1150, 684, 106589, 18408, 8366, 451, 7981, 16345, 403, 17192, 30222, 147, 1388, 166, 133, 5802, 1278, 114468, 47, 2237, 12030, 535, 58, 1367, 145806, 152, 152, 72, 141, 19703, 2393248, 20773, 44, 540, 19, 2064, 1900, 5501, 10465, 2102, 30428, 18122, ...]
>
"""
@spec pull(df :: DataFrame.t(), column :: String.t()) :: Series.t()
def pull(df, column) do
names = names(df)
maybe_raise_column_not_found(names, column)
apply_impl(df, :pull, [column])
end
@doc """
Subset a continuous set of rows.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.slice(df, 1, 2)
#Explorer.DataFrame<
[rows: 2, columns: 10]
year integer [2010, 2010]
country string ["ALBANIA", "ALGERIA"]
total integer [1254, 32500]
solid_fuel integer [117, 332]
liquid_fuel integer [953, 12381]
gas_fuel integer [7, 14565]
cement integer [177, 2598]
gas_flaring integer [0, 2623]
per_capita float [0.43, 0.9]
bunker_fuels integer [7, 663]
>
Negative offsets count from the end of the dataframe:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.slice(df, -10, 2)
#Explorer.DataFrame<
[rows: 2, columns: 10]
year integer [2014, 2014]
country string ["UNITED STATES OF AMERICA", "URUGUAY"]
total integer [1432855, 1840]
solid_fuel integer [450047, 2]
liquid_fuel integer [576531, 1700]
gas_fuel integer [390719, 25]
cement integer [11314, 112]
gas_flaring integer [4244, 0]
per_capita float [4.43, 0.54]
bunker_fuels integer [30722, 251]
>
If the length would run past the end of the dataframe, the result may be shorter than the length:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.slice(df, -10, 20)
#Explorer.DataFrame<
[rows: 10, columns: 10]
year integer [2014, 2014, 2014, 2014, 2014, "..."]
country string ["UNITED STATES OF AMERICA", "URUGUAY", "UZBEKISTAN", "VANUATU", "VENEZUELA", "..."]
total integer [1432855, 1840, 28692, 42, 50510, "..."]
solid_fuel integer [450047, 2, 1677, 0, 204, "..."]
liquid_fuel integer [576531, 1700, 2086, 42, 28445, "..."]
gas_fuel integer [390719, 25, 23929, 0, 12731, "..."]
cement integer [11314, 112, 1000, 0, 1088, "..."]
gas_flaring integer [4244, 0, 0, 0, 8042, "..."]
per_capita float [4.43, 0.54, 0.97, 0.16, 1.65, "..."]
bunker_fuels integer [30722, 251, 0, 10, 1256, "..."]
>
"""
def slice(df, offset, length), do: apply_impl(df, :slice, [offset, length])
@doc """
Subset rows with a list of indices.
## Examples
iex> df = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> Explorer.DataFrame.take(df, [0, 2])
#Explorer.DataFrame<
[rows: 2, columns: 2]
a integer [1, 3]
b string ["a", "c"]
>
"""
def take(df, row_indices) when is_list(row_indices) do
n_rows = n_rows(df)
Enum.each(row_indices, fn idx ->
if idx >= n_rows or idx < -n_rows,
do:
raise(
ArgumentError,
"Requested row index (#{idx}) out of bounds (-#{n_rows}:#{n_rows})."
)
end)
apply_impl(df, :take, [row_indices])
end
@doc """
Sample rows from a dataframe.
If given an integer as the second argument, it will return N samples. If given a float, it will
return that proportion of the dataframe's rows.
Can sample with or without replacement.
## Options
* `with_replacement?` - If set to `true`, each sample will be independent and therefore values may repeat. Required to be `true` for `n` greater than the number of rows in the dataframe or `frac` > 1.0. (default: `false`)
* `seed` - An integer to be used as a random seed. If nil, a random value between 1 and 1e12 will be used. (default: nil)
## Examples
You can sample N rows:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.sample(df, 3, seed: 100)
#Explorer.DataFrame<
[rows: 3, columns: 10]
year integer [2012, 2012, 2013]
country string ["ZIMBABWE", "NICARAGUA", "NIGER"]
total integer [2125, 1260, 529]
solid_fuel integer [917, 0, 93]
liquid_fuel integer [1006, 1176, 432]
gas_fuel integer [0, 0, 0]
cement integer [201, 84, 4]
gas_flaring integer [0, 0, 0]
per_capita float [0.15, 0.21, 0.03]
bunker_fuels integer [9, 18, 19]
>
Or you can sample a proportion of rows:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.sample(df, 0.03, seed: 100)
#Explorer.DataFrame<
[rows: 33, columns: 10]
year integer [2013, 2012, 2013, 2012, 2010, "..."]
country string ["BAHAMAS", "POLAND", "SLOVAKIA", "MOZAMBIQUE", "OMAN", "..."]
total integer [764, 81792, 9024, 851, 12931, "..."]
solid_fuel integer [1, 53724, 3657, 11, 0, "..."]
liquid_fuel integer [763, 17353, 2090, 632, 2331, "..."]
gas_fuel integer [0, 8544, 2847, 47, 9309, "..."]
cement integer [0, 2165, 424, 161, 612, "..."]
gas_flaring integer [0, 6, 7, 0, 679, "..."]
per_capita float [2.02, 2.12, 1.67, 0.03, 4.39, "..."]
bunker_fuels integer [167, 573, 34, 56, 1342, "..."]
>
"""
@spec sample(df :: DataFrame.t(), n_or_frac :: number(), opts :: Keyword.t()) :: DataFrame.t()
def sample(df, n_or_frac, opts \\ [])
def sample(df, n, opts) when is_integer(n) do
opts = keyword!(opts, with_replacement?: false, seed: Enum.random(1..1_000_000_000_000))
n_rows = n_rows(df)
case {n > n_rows, opts[:with_replacement?]} do
{true, false} ->
raise ArgumentError,
message:
"In order to sample more rows than are in the dataframe (#{n_rows}), sampling " <>
"`with_replacement?` must be true."
_ ->
:ok
end
apply_impl(df, :sample, [n, opts[:with_replacement?], opts[:seed]])
end
def sample(df, frac, opts) when is_float(frac) do
n_rows = n_rows(df)
n = round(frac * n_rows)
sample(df, n, opts)
end
@doc """
Pivot data from wide to long.
`Explorer.DataFrame.pivot_longer/3` "lengthens" data, increasing the number of rows and
decreasing the number of columns. The inverse transformation is
`Explorer.DataFrame.pivot_wider/4`.
The second argument (`columns`) can be either a list of column names to use or a filter callback on
the dataframe's names.
`value_cols` must all have the same dtype.
## Options
* `value_cols` - Columns to use for values. May be a filter callback on the dataframe's column names. Defaults to an empty list, using all variables except the columns to pivot.
* `names_to` - A string specifying the name of the column to create from the data stored in the column names of the dataframe. Defaults to `"variable"`.
* `values_to` - A string specifying the name of the column to create from the data stored in series element values. Defaults to `"value"`.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.pivot_longer(df, ["year", "country"], value_cols: &String.ends_with?(&1, "fuel"))
#Explorer.DataFrame<
[rows: 3282, columns: 4]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
variable string ["solid_fuel", "solid_fuel", "solid_fuel", "solid_fuel", "solid_fuel", "..."]
value integer [627, 117, 332, 0, 0, "..."]
>
"""
@spec pivot_longer(
df :: DataFrame.t(),
columns :: [String.t()] | function(),
opts :: Keyword.t()
) :: DataFrame.t()
def pivot_longer(df, columns, opts \\ [])
def pivot_longer(df, columns, opts) when is_list(columns) do
opts = keyword!(opts, value_cols: [], names_to: "variable", values_to: "value")
names = names(df)
dtypes = names |> Enum.zip(dtypes(df)) |> Enum.into(%{})
Enum.each(columns, fn name -> maybe_raise_column_not_found(names, name) end)
value_cols =
case opts[:value_cols] do
[] ->
Enum.filter(names, fn name -> name not in columns end)
[_ | _] = cols ->
Enum.each(cols, fn col ->
if col in columns,
do:
raise(ArgumentError,
message: "Value columns may not also be ID columns. Found #{col} in both."
)
end)
cols
callback when is_function(callback) ->
names
|> Enum.filter(fn name -> name not in columns end)
|> Enum.filter(callback)
end
Enum.each(value_cols, fn name -> maybe_raise_column_not_found(names, name) end)
dtypes
|> Map.take(value_cols)
|> Map.values()
|> Enum.uniq()
|> length()
|> case do
1 ->
:ok
_ ->
raise ArgumentError,
message: "Value columns may only include one dtype. Found multiple dtypes."
end
apply_impl(df, :pivot_longer, [columns, value_cols, opts[:names_to], opts[:values_to]])
end
def pivot_longer(df, columns, opts) when is_function(columns),
do:
df
|> names()
|> columns.()
|> then(&pivot_longer(df, &1, opts))
@doc """
Pivot data from long to wide.
`Explorer.DataFrame.pivot_wider/4` "widens" data, increasing the number of columns and
decreasing the number of rows. The inverse transformation is
`Explorer.DataFrame.pivot_longer/3`.
Due to a restriction upstream, `values_from` must be a numeric, date or datetime column.
## Options
* `id_cols` - A set of columns that uniquely identifies each observation. Defaults to all columns in data except for the columns specified in `names_from` and `values_from`. Typically used when you have redundant variables, i.e. variables whose values are perfectly correlated with existing variables. May accept a filter callback or list of column names.
* `names_prefix` - String added to the start of every variable name. This is particularly useful if `names_from` is a numeric vector and you want to create syntactic variable names.
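## Examples
A minimal sketch (not a doctest; column order may vary by backend):
df = Explorer.DataFrame.from_map(%{id: [1, 1, 2, 2], variable: ["a", "b", "a", "b"], value: [1, 2, 3, 4]})
Explorer.DataFrame.pivot_wider(df, "variable", "value")
# Yields one row per id with new columns "a" and "b" holding the corresponding values.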
"""
@spec pivot_wider(
df :: DataFrame.t(),
names_from :: String.t(),
values_from :: String.t(),
opts ::
Keyword.t()
) :: DataFrame.t()
def pivot_wider(df, names_from, values_from, opts \\ []) do
opts = keyword!(opts, id_cols: [], names_prefix: "")
names = names(df)
dtypes = names |> Enum.zip(dtypes(df)) |> Enum.into(%{})
case Map.get(dtypes, values_from) do
:integer ->
:ok
:float ->
:ok
:date ->
:ok
:datetime ->
:ok
dtype ->
raise ArgumentError, message: "The values_from column must be numeric. Found #{dtype}."
end
id_cols =
case opts[:id_cols] do
[] ->
Enum.filter(names, &(&1 not in [names_from, values_from]))
[_ | _] = id_names ->
Enum.filter(id_names, &(&1 not in [names_from, values_from]))
fun when is_function(fun) ->
names |> Enum.filter(fun) |> Enum.filter(&(&1 not in [names_from, values_from]))
end
Enum.each(id_cols ++ [names_from, values_from], fn name ->
maybe_raise_column_not_found(names, name)
end)
apply_impl(df, :pivot_wider, [id_cols, names_from, values_from, opts[:names_prefix]])
end
# Two table verbs
@doc """
Join two tables.
## Join types
* `inner` - Returns all rows from `left` where there are matching values in `right`, and all columns from `left` and `right`.
* `left` - Returns all rows from `left` and all columns from `left` and `right`. Rows in `left` with no match in `right` will have `nil` values in the new columns.
* `right` - Returns all rows from `right` and all columns from `left` and `right`. Rows in `right` with no match in `left` will have `nil` values in the new columns.
* `outer` - Returns all rows and all columns from both `left` and `right`. Where there is no matching value, returns `nil` for the missing side.
* `cross` - Also known as a cartesian join. Returns all combinations of `left` and `right`. Can be very computationally expensive.
## Options
* `on` - The columns to join on. Defaults to overlapping columns. Does not apply to cross join.
* `how` - One of the join types (as an atom) described above. Defaults to `:inner`.
## Examples
Inner join:
iex> left = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> right = Explorer.DataFrame.from_map(%{a: [1, 2, 2], c: ["d", "e", "f"]})
iex> Explorer.DataFrame.join(left, right)
#Explorer.DataFrame<
[rows: 3, columns: 3]
a integer [1, 2, 2]
b string ["a", "b", "b"]
c string ["d", "e", "f"]
>
Left join:
iex> left = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> right = Explorer.DataFrame.from_map(%{a: [1, 2, 2], c: ["d", "e", "f"]})
iex> Explorer.DataFrame.join(left, right, how: :left)
#Explorer.DataFrame<
[rows: 4, columns: 3]
a integer [1, 2, 2, 3]
b string ["a", "b", "b", "c"]
c string ["d", "e", "f", nil]
>
Right join:
iex> left = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> right = Explorer.DataFrame.from_map(%{a: [1, 2, 4], c: ["d", "e", "f"]})
iex> Explorer.DataFrame.join(left, right, how: :right)
#Explorer.DataFrame<
[rows: 3, columns: 3]
a integer [1, 2, 4]
c string ["d", "e", "f"]
b string ["a", "b", nil]
>
Outer join:
iex> left = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> right = Explorer.DataFrame.from_map(%{a: [1, 2, 4], c: ["d", "e", "f"]})
iex> Explorer.DataFrame.join(left, right, how: :outer)
#Explorer.DataFrame<
[rows: 4, columns: 3]
b string ["a", "b", nil, "c"]
a integer [1, 2, 4, 3]
c string ["d", "e", "f", nil]
>
Cross join:
iex> left = Explorer.DataFrame.from_map(%{a: [1, 2, 3], b: ["a", "b", "c"]})
iex> right = Explorer.DataFrame.from_map(%{a: [1, 2, 4], c: ["d", "e", "f"]})
iex> Explorer.DataFrame.join(left, right, how: :cross)
#Explorer.DataFrame<
[rows: 9, columns: 4]
a integer [1, 1, 1, 2, 2, "..."]
b string ["a", "a", "a", "b", "b", "..."]
a_right integer [1, 2, 4, 1, 2, "..."]
c string ["d", "e", "f", "d", "e", "..."]
>
"""
def join(%DataFrame{} = left, %DataFrame{} = right, opts \\ []) do
left_cols = names(left)
right_cols = names(right)
opts = keyword!(opts, on: find_overlapping_cols(left_cols, right_cols), how: :inner)
case {opts[:on], opts[:how]} do
{_, :cross} ->
nil
{[], _} ->
raise(ArgumentError, message: "Could not find any overlapping columns.")
{[_ | _] = on, _} ->
Enum.each(on, fn name ->
maybe_raise_column_not_found(left_cols, name)
maybe_raise_column_not_found(right_cols, name)
end)
_ ->
nil
end
apply_impl(left, :join, [right, opts[:on], opts[:how]])
end
defp find_overlapping_cols(left_cols, right_cols) do
left_cols = MapSet.new(left_cols)
right_cols = MapSet.new(right_cols)
left_cols |> MapSet.intersection(right_cols) |> MapSet.to_list()
end
# Groups
@doc """
Group the dataframe by one or more variables.
When the dataframe has grouping variables, operations are performed per group.
`Explorer.DataFrame.ungroup/2` removes grouping.
## Examples
You can group by a single variable:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.group_by(df, "country")
#Explorer.DataFrame<
[rows: 1094, columns: 10, groups: ["country"]]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
total integer [2308, 1254, 32500, 141, 7924, "..."]
solid_fuel integer [627, 117, 332, 0, 0, "..."]
liquid_fuel integer [1601, 953, 12381, 141, 3649, "..."]
gas_fuel integer [74, 7, 14565, 0, 374, "..."]
cement integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
Or you can group by multiple:
iex> df = Explorer.Datasets.fossil_fuels()
iex> Explorer.DataFrame.group_by(df, ["country", "year"])
#Explorer.DataFrame<
[rows: 1094, columns: 10, groups: ["country", "year"]]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
total integer [2308, 1254, 32500, 141, 7924, "..."]
solid_fuel integer [627, 117, 332, 0, 0, "..."]
liquid_fuel integer [1601, 953, 12381, 141, 3649, "..."]
gas_fuel integer [74, 7, 14565, 0, 374, "..."]
cement integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
"""
@spec group_by(df :: DataFrame.t(), groups_or_group :: [String.t()] | String.t()) ::
DataFrame.t()
def group_by(df, groups) when is_list(groups) do
names = names(df)
Enum.each(groups, fn name -> maybe_raise_column_not_found(names, name) end)
apply_impl(df, :group_by, [groups])
end
def group_by(df, group) when is_binary(group), do: group_by(df, [group])
@doc """
Removes grouping variables.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> df = Explorer.DataFrame.group_by(df, ["country", "year"])
iex> Explorer.DataFrame.ungroup(df, ["country"])
#Explorer.DataFrame<
[rows: 1094, columns: 10, groups: ["year"]]
year integer [2010, 2010, 2010, 2010, 2010, "..."]
country string ["AFGHANISTAN", "ALBANIA", "ALGERIA", "ANDORRA", "ANGOLA", "..."]
total integer [2308, 1254, 32500, 141, 7924, "..."]
solid_fuel integer [627, 117, 332, 0, 0, "..."]
liquid_fuel integer [1601, 953, 12381, 141, 3649, "..."]
gas_fuel integer [74, 7, 14565, 0, 374, "..."]
cement integer [5, 177, 2598, 0, 204, "..."]
gas_flaring integer [0, 0, 2623, 0, 3697, "..."]
per_capita float [0.08, 0.43, 0.9, 1.68, 0.37, "..."]
bunker_fuels integer [9, 7, 663, 0, 321, "..."]
>
"""
@spec ungroup(df :: DataFrame.t(), groups_or_group :: [String.t()] | String.t()) ::
DataFrame.t()
def ungroup(df, groups \\ [])
def ungroup(df, groups) when is_list(groups) do
current_groups = groups(df)
Enum.each(groups, fn group ->
if group not in current_groups,
do:
raise(ArgumentError,
message: "Could not find #{group} in current groups (#{inspect(current_groups)})."
)
end)
apply_impl(df, :ungroup, [groups])
end
def ungroup(df, group) when is_binary(group), do: ungroup(df, [group])
@supported_aggs ~w[min max sum mean median first last count n_unique]a
@doc """
Summarise each group to a single row.
Implicitly ungroups.
## Supported operations
The following aggregations may be performed:
* `:min` - Take the minimum value within the group. See `Explorer.Series.min/1`.
* `:max` - Take the maximum value within the group. See `Explorer.Series.max/1`.
* `:sum` - Take the sum of the series within the group. See `Explorer.Series.sum/1`.
* `:mean` - Take the mean of the series within the group. See `Explorer.Series.mean/1`.
* `:median` - Take the median of the series within the group. See `Explorer.Series.median/1`.
* `:first` - Take the first value within the group. See `Explorer.Series.first/1`.
* `:last` - Take the last value within the group. See `Explorer.Series.last/1`.
* `:count` - Count the number of rows per group.
* `:n_unique` - Count the number of unique rows per group.
## Examples
iex> df = Explorer.Datasets.fossil_fuels()
iex> df |> Explorer.DataFrame.group_by("year") |> Explorer.DataFrame.summarise(total: [:max, :min], country: [:n_unique])
#Explorer.DataFrame<
[rows: 5, columns: 4]
year integer [2010, 2011, 2012, 2013, 2014]
country_n_unique integer [217, 217, 220, 220, 220]
total_max integer [2393248, 2654360, 2734817, 2797384, 2806634]
total_min integer [1, 2, 2, 2, 3]
>
"""
@spec summarise(df :: DataFrame.t(), with_columns :: Keyword.t() | map()) :: DataFrame.t()
def summarise(%DataFrame{groups: []}, _),
do:
raise(ArgumentError,
message: "Dataframe must be grouped in order to perform summarisation."
)
def summarise(df, with_columns) when is_map(with_columns) do
with_columns = Enum.reduce(with_columns, %{}, &summarise_reducer(&1, &2, df))
columns = names(df)
Enum.each(with_columns, fn {name, values} ->
maybe_raise_column_not_found(columns, name)
unless values |> MapSet.new() |> MapSet.subset?(MapSet.new(@supported_aggs)) do
unsupported = values |> MapSet.new() |> MapSet.difference(MapSet.new(@supported_aggs)) |> MapSet.to_list()
raise ArgumentError, message: "Found unsupported aggregations #{inspect(unsupported)}."
end
end)
apply_impl(df, :summarise, [with_columns])
end
def summarise(df, with_columns) when is_list(with_columns) do
if not Keyword.keyword?(with_columns),
do: raise(ArgumentError, message: "Expected second argument to be a keyword list.")
with_columns
|> Enum.reduce(%{}, fn {colname, value}, acc ->
Map.put(acc, Atom.to_string(colname), value)
end)
|> then(&summarise(df, &1))
end
defp summarise_reducer({colname, value}, acc, %DataFrame{} = _df)
when is_binary(colname) and is_map(acc) and is_list(value),
do: Map.put(acc, colname, value)
defp summarise_reducer({colname, value}, acc, %DataFrame{} = df)
when is_atom(colname) and is_map(acc) and is_list(value),
do: summarise_reducer({Atom.to_string(colname), value}, acc, df)
# Helpers
defp backend_from_options!(opts) do
backend = Explorer.Shared.backend_from_options!(opts) || Explorer.default_backend()
Module.concat(backend, "DataFrame")
end
defp apply_impl(df, fun, args \\ []) do
impl = impl!(df)
apply(impl, fun, [df | args])
end
defp maybe_raise_column_not_found(names, name) do
if name not in names,
do:
raise(ArgumentError,
message:
List.to_string(
["Could not find column name \"#{name}\""] ++ did_you_mean(name, names)
)
)
end
@threshold 0.77
@max_suggestions 5
defp did_you_mean(missing_key, available_keys) do
suggestions =
for key <- available_keys,
distance = String.jaro_distance(missing_key, key),
distance >= @threshold,
do: {distance, key}
case suggestions do
[] -> []
suggestions -> [". Did you mean:\n\n" | format_suggestions(suggestions)]
end
end
defp format_suggestions(suggestions) do
suggestions
|> Enum.sort(&(elem(&1, 0) >= elem(&2, 0)))
|> Enum.take(@max_suggestions)
|> Enum.sort(&(elem(&1, 1) <= elem(&2, 1)))
|> Enum.map(fn {_, key} -> [" * ", inspect(key), ?\n] end)
end
end
|
lib/explorer/data_frame.ex
| 0.904904
| 0.711894
|
data_frame.ex
|
starcoder
|
defmodule Dnsimple.Registrar do
@moduledoc """
Provides functions to interact with the
[registrar endpoints](https://developer.dnsimple.com/v2/registrar/).
See:
- https://developer.dnsimple.com/v2/registrar/
- https://developer.dnsimple.com/v2/registrar/auto-renewal/
- https://developer.dnsimple.com/v2/registrar/whois-privacy/
- https://developer.dnsimple.com/v2/registrar/delegation/
"""
alias Dnsimple.Client
alias Dnsimple.Response
alias Dnsimple.DomainCheck
alias Dnsimple.DomainPremiumPrice
alias Dnsimple.DomainRegistration
alias Dnsimple.DomainRenewal
alias Dnsimple.DomainTransfer
alias Dnsimple.WhoisPrivacy
alias Dnsimple.WhoisPrivacyRenewal
alias Dnsimple.VanityNameServer
@doc """
Checks if a domain name is available to be registered and whether premium
pricing applies to that domain name.
See:
- https://developer.dnsimple.com/v2/registrar/#check
## Examples:
client = %Dnsimple.Client{access_token: "a<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.check_domain(client, account_id = 1010, domain_id = "example.com")
"""
@spec check_domain(Client.t, String.t, String.t, keyword()) :: {:ok|:error, Response.t}
def check_domain(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/check")
Client.get(client, url, options)
|> Response.parse(%{"data" => %DomainCheck{}})
end
@doc """
Gets the premium price for a domain.
See:
- https://developer.dnsimple.com/v2/registrar/#premium-price
## Examples:
client = %Dnsimple.Client{access_token: "a<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.get_domain_premium_price(client, account_id = "1010", domain_id = "example.com")
{:ok, response} = Dnsimple.Registrar.get_domain_premium_price(client, account_id = "1010", domain_id = "example.com", %{action: "registration"})
{:ok, response} = Dnsimple.Registrar.get_domain_premium_price(client, account_id = "1010", domain_id = "example.com", %{action: "renewal"})
{:ok, response} = Dnsimple.Registrar.get_domain_premium_price(client, account_id = "1010", domain_id = "example.com", %{action: "transfer"})
"""
@spec get_domain_premium_price(Client.t, String.t, String.t, map(), keyword()) :: {:ok|:error, Response.t}
def get_domain_premium_price(client, account_id, domain_name, params \\ %{}, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/premium_price")
options = Keyword.put(options, :action, Map.get(params, :action))
Client.get(client, url, options)
|> Response.parse(%{"data" => %DomainPremiumPrice{}})
end
@doc """
Registers a domain.
See:
- https://developer.dnsimple.com/v2/registrar/#register
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.register_domain(client, account_id = 1010, domain_id = "example.com", %{
registrant_id: 1,
privacy: true,
auto_renew: false,
})
"""
@spec register_domain(Client.t, String.t, String.t, Keyword.t, Keyword.t) :: {:ok|:error, Response.t}
def register_domain(client, account_id, domain_name, attributes \\ [], options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/registrations")
Client.post(client, url, attributes, options)
|> Response.parse(%{"data" => %DomainRegistration{}})
end
@doc """
Renews a domain.
See:
- https://developer.dnsimple.com/v2/registrar/#renew
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.renew_domain(client, account_id = 1010, domain_id = "example.com")
{:ok, response} = Dnsimple.Registrar.renew_domain(client, account_id = 1010, domain_id = "example.com", %{period: 5})
"""
@spec renew_domain(Client.t, String.t, String.t, Keyword.t, Keyword.t) :: {:ok|:error, Response.t}
def renew_domain(client, account_id, domain_name, attributes \\ [], options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/renewals")
Client.post(client, url, attributes, options)
|> Response.parse(%{"data" => %DomainRenewal{}})
end
@doc """
Starts the transfer of a domain to DNSimple.
See:
- https://developer.dnsimple.com/v2/registrar/#transfer
## Examples:
client = %Dnsimple.Client{access_token: "a<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.transfer_domain(client, account_id = 1010, domain_id = "example.com", %{
registrant_id: 1,
auth_code: "XXXXXXXXX",
privacy: true,
auto_renew: true,
})
"""
@spec transfer_domain(Client.t, String.t, String.t, Keyword.t, Keyword.t) :: {:ok|:error, Response.t}
def transfer_domain(client, account_id, domain_name, attributes \\ [], options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/transfers")
Client.post(client, url, attributes, options)
|> Response.parse(%{"data" => %DomainTransfer{}})
end
@doc """
Retrieves the details of an existing domain transfer.
See:
- https://developer.dnsimple.com/v2/registrar/#getDomainTransfer
## Examples:
client = %Dnsimple.Client{access_token: "a1b2c3d4"}
{:ok, response} = Dnsimple.Registrar.get_domain_transfer(client, account_id = 1010, domain_name = "example.com", transfer_id = 42)
"""
@spec get_domain_transfer(Client.t, String.t | integer, String.t, String.t | integer, Keyword.t) :: {:ok|:error, Response.t}
def get_domain_transfer(client, account_id, domain_name, domain_transfer_id, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/transfers/#{domain_transfer_id}")
Client.get(client, url, options)
|> Response.parse(%{"data" => %DomainTransfer{}})
end
@doc """
Cancels an in-progress domain transfer.
See:
- https://developer.dnsimple.com/v2/registrar/#cancelDomainTransfer
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.cancel_domain_transfer(client, account_id = 1010, domain_name = "example.com", transfer_id = 42)
"""
@spec cancel_domain_transfer(Client.t, String.t | integer, String.t, String.t | integer, Keyword.t) :: {:ok|:error, Response.t}
def cancel_domain_transfer(client, account_id, domain_name, domain_transfer_id, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/transfers/#{domain_transfer_id}")
Client.delete(client, url, options)
|> Response.parse(%{"data" => %DomainTransfer{}})
end
@doc """
Requests the transfer of a domain out of DNSimple.
See:
- https://developer.dnsimple.com/v2/registrar/#transfer_out
## Examples:
client = %Dnsimple.Client{access_token: "a<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.transfer_domain_out(client, account_id = 1010, domain_id = "example.com")
"""
@spec transfer_domain_out(Client.t, String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def transfer_domain_out(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/authorize_transfer_out")
Client.post(client, url, Client.empty_body(), options)
|> Response.parse(nil)
end
@doc """
Enables auto-renewal for the domain.
See:
- https://developer.dnsimple.com/v2/registrar/auto-renewal/#enable
## Examples:
client = %Dnsimple.Client{access_token: "a<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.enable_domain_auto_renewal(client, account_id = 1010, domain_id = "example.com")
"""
@spec enable_domain_auto_renewal(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def enable_domain_auto_renewal(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/auto_renewal")
Client.put(client, url, Client.empty_body(), options)
|> Response.parse(nil)
end
@doc """
Disables auto-renewal for the domain.
See:
- https://developer.dnsimple.com/v2/registrar/auto-renewal/#disable
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.disable_domain_auto_renewal(client, account_id = 1010, domain_id = "example.com")
"""
@spec disable_domain_auto_renewal(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def disable_domain_auto_renewal(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/auto_renewal")
Client.delete(client, url, options)
|> Response.parse(nil)
end
@doc """
Returns the whois privacy of the domain.
See:
- https://developer.dnsimple.com/v2/registrar/whois-privacy/#get
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.get_whois_privacy(client, account_id = 1010, domain_id = "example.com")
"""
@spec get_whois_privacy(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def get_whois_privacy(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/whois_privacy")
Client.get(client, url, options)
|> Response.parse(%{"data" => %WhoisPrivacy{}})
end
@doc """
Enables whois privacy for the domain.
See:
- https://developer.dnsimple.com/v2/registrar/whois-privacy/#enable
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.enable_whois_privacy(client, account_id = 1010, domain_id = "example.com")
"""
@spec enable_whois_privacy(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def enable_whois_privacy(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/whois_privacy")
Client.put(client, url, Client.empty_body(), options)
|> Response.parse(%{"data" => %WhoisPrivacy{}})
end
@doc """
Disables whois privacy for the domain.
See:
- https://developer.dnsimple.com/v2/registrar/whois-privacy/#disable
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.disable_whois_privacy(client, account_id = 1010, domain_id = "example.com")
"""
@spec disable_whois_privacy(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def disable_whois_privacy(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/whois_privacy")
Client.delete(client, url, options)
|> Response.parse(%{"data" => %WhoisPrivacy{}})
end
@doc """
Renews whois privacy for the domain.
See:
- https://developer.dnsimple.com/v2/registrar/whois-privacy/#renew
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.renew_whois_privacy(client, account_id = 1010, domain_id = "example.com")
"""
@spec renew_whois_privacy(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def renew_whois_privacy(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/whois_privacy/renewals")
Client.post(client, url, Client.empty_body(), options)
|> Response.parse(%{"data" => %WhoisPrivacyRenewal{}})
end
@doc """
Returns the name servers the domain is delegating to.
See:
- https://developer.dnsimple.com/v2/registrar/delegation/#list
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.get_domain_delegation(client, account_id = 1010, domain_id = "example.com")
"""
@spec get_domain_delegation(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def get_domain_delegation(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/delegation")
Client.get(client, url, options)
|> Response.parse(%{"data" => []})
end
@doc """
Changes the domain's name servers and returns them.
See:
- https://developer.dnsimple.com/v2/registrar/delegation/#update
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.change_domain_delegation(client, account_id = 1010, domain_id = "example.com", [
"ns1.provider.com",
"ns2.provider.com",
"ns3.provider.com",
"ns4.provider.com",
])
"""
@spec change_domain_delegation(Client.t, integer | String.t, String.t, list(), keyword()) :: {:ok|:error, Response.t}
def change_domain_delegation(client, account_id, domain_name, name_servers, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/delegation")
Client.put(client, url, name_servers, options)
|> Response.parse(%{"data" => []})
end
@doc """
Delegates the domain to vanity name servers.
See:
- https://developer.dnsimple.com/v2/registrar/delegation/#delegateToVanity
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.change_domain_delegation_to_vanity(client, account_id = 1010, domain_id = "example.com", [
"ns1.example.com",
"ns2.example.com",
"ns3.example.com",
"ns4.example.com",
])
"""
@spec change_domain_delegation_to_vanity(Client.t, integer | String.t, String.t, list(), keyword()) :: {:ok|:error, Response.t}
def change_domain_delegation_to_vanity(client, account_id, domain_name, name_servers, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/delegation/vanity")
Client.put(client, url, name_servers, options)
|> Response.parse(%{"data" => [%VanityNameServer{}]})
end
@doc """
Reverts all the operations performed to delegate to vanity name servers and
delegates the domain back to DNSimple's name servers (if DNSimple is the
registrar of the domain).
See:
- https://developer.dnsimple.com/v2/registrar/delegation/#dedelegateFromVanity
## Examples:
client = %Dnsimple.Client{access_token: "<PASSWORD>"}
{:ok, response} = Dnsimple.Registrar.change_domain_delegation_from_vanity(client, account_id = 1010, domain_id = "example.com")
"""
@spec change_domain_delegation_from_vanity(Client.t, integer | String.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
def change_domain_delegation_from_vanity(client, account_id, domain_name, options \\ []) do
url = Client.versioned("/#{account_id}/registrar/domains/#{domain_name}/delegation/vanity")
Client.delete(client, url, options)
|> Response.parse(nil)
end
end
|
lib/dnsimple/registrar.ex
| 0.627951
| 0.465327
|
registrar.ex
|
starcoder
|
defmodule PhantomChain.Client.Logger do
@moduledoc """
This is a wrapper for the standard Logger module. It contains convenience
functions for logging exceptions and arbitrary terms in addition to strings.
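## Example
Illustrative usage (each call logs the value and returns it unchanged):
alias PhantomChain.Client.Logger, as: Log
Log.info("connected")     # logs "connected" and returns "connected"
Log.debug(%{height: 42})  # pretty-prints the map, then returns it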
"""
require Logger
# Contextual
@doc """
Converts a rescued error and stacktrace into a pretty error output.
Returns the error object passed.
"""
@spec rescued_error(Exception.t(), list()) :: Exception.t()
def rescued_error(err, stacktrace) do
:error |> Exception.format(err, stacktrace) |> error
err
end
# Generic
@doc """
Outputs the object or string in debug mode.
Returns the object passed.
"""
@spec debug(any()) :: any()
def debug(value) when is_bitstring(value) do
Logger.debug(value)
value
end
def debug(value) do
value |> pp |> debug
value
end
@doc """
Outputs the object or string in error mode.
Returns the object passed.
"""
@spec error(any()) :: any()
def error(value) when is_bitstring(value) do
Logger.error(value)
value
end
def error(value) do
value |> pp |> error
value
end
@doc """
Outputs the object or string in info mode.
Returns the object passed.
"""
@spec info(any()) :: any()
def info(value) when is_bitstring(value) do
Logger.info(value)
value
end
def info(value) do
value |> pp |> info
value
end
@doc """
Outputs the object or string using the given log level.
Returns the object passed.
"""
@spec log(atom(), any()) :: any()
def log(:debug, value) do
debug(value)
end
def log(:default, value) do
info(value)
end
def log(:error, value) do
error(value)
end
def log(:info, value) do
info(value)
end
def log(:warn, value) do
warn(value)
end
@doc """
Outputs the object or string in warn mode.
Returns the object passed.
"""
def warn(value) when is_bitstring(value) do
Logger.warn(value)
value
end
def warn(value) do
value |> pp |> warn
value
end
# private
defp pp(value) do
inspect(value, pretty: true, width: 100)
end
end
|
lib/phantomchain/client/logger.ex
| 0.847337
| 0.533033
|
logger.ex
|
starcoder
|
defmodule Beiin.Record do
defstruct metric: "name", tags: %{"default" => "value"}, timestamp: 0, value: 0
end
defmodule Beiin.RecordServer do
use GenServer
alias Beiin.Record
alias Beiin.TimestampGenerator, as: TSG
## Client API
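# Illustrative usage (assumes the default `Beiin.TimestampGenerator` is running
# and reachable; the metric names and tag maps below are placeholder values):
#
#     {:ok, server} = Beiin.RecordServer.start_link(["cpu.load"], [%{"host" => "a"}])
#     %Beiin.Record{} = Beiin.RecordServer.next_insert(server)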
def start_link(metrics, tag_maps, opts \\ []) do
{ins_tsg, opts} = Keyword.pop(opts, :ins_tsg, TSG)
{read_tsg, opts} = Keyword.pop(opts, :read_tsg, TSG)
init_map = %{
metrics: metrics,
next_ins_records: [],
next_read_records: [],
tag_maps: tag_maps,
ins_tsg: ins_tsg,
read_tsg: read_tsg
}
GenServer.start_link(__MODULE__, init_map, opts)
end
def next_insert(server) do
GenServer.call(server, {:next_insert})
end
def next_read(server) do
GenServer.call(server, {:next_read})
end
## Server Callbacks
def init(map) do
{:ok, map}
end
defp generate_next_records(metrics, tag_maps, tsg_ref) do
timestamp = TSG.next_timestamp(tsg_ref)
Enum.map(metrics, fn metric -> %Record{metric: metric, timestamp: timestamp} end)
|> Enum.map(fn record -> Enum.map(tag_maps, fn t -> %{record | tags: t} end) end)
|> List.flatten()
end
def handle_call({:next_insert}, _from, map) do
{:ok, metrics} = Map.fetch(map, :metrics)
{:ok, tag_maps} = Map.fetch(map, :tag_maps)
{:ok, ins_tsg} = Map.fetch(map, :ins_tsg)
{next, new_map} =
Map.get_and_update(map, :next_ins_records, fn records ->
[next | rs] =
case records do
[] -> generate_next_records(metrics, tag_maps, ins_tsg)
_ -> records
end
{next, rs}
end)
{:reply, next, new_map}
end
def handle_call({:next_read}, _from, map) do
{:ok, metrics} = Map.fetch(map, :metrics)
{:ok, tag_maps} = Map.fetch(map, :tag_maps)
{:ok, read_tsg} = Map.fetch(map, :read_tsg)
{next, new_map} =
Map.get_and_update(map, :next_read_records, fn records ->
[next | rs] =
case records do
[] -> generate_next_records(metrics, tag_maps, read_tsg)
_ -> records
end
{next, rs}
end)
{:reply, next, new_map}
end
end
|
lib/record_server.ex
| 0.647798
| 0.421909
|
record_server.ex
|
starcoder
|
defmodule Mnemo do
@moduledoc """
Implementation of [BIP39](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki)
"""
@valid_strengths [128, 160, 192, 224, 256]
@default_strength 256
@valid_mnemonic_word_count [12, 15, 18, 21, 24]
@pbkdf2_opts rounds: 2048, digest: :sha512, length: 64, format: :hex
@doc """
Generates a random English mnemonic.
Optional entropy length (`strength`) argument can be provided; defaults to 256 bits.
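For example (illustrative; output is random on every call):
Mnemo.generate()     # 24-word mnemonic from 256 bits of entropy
Mnemo.generate(128)  # 12-word mnemonic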
"""
def generate(strength \\ @default_strength) when strength in @valid_strengths do
strength
|> div(8)
|> :crypto.strong_rand_bytes()
|> mnemonic()
end
@doc """
Generates an English mnemonic for pre-existing entropy (obtained from elsewhere).
"""
def mnemonic(entropy) do
entropy
|> maybe_decode()
|> update_with_checksum()
|> sentence()
|> Enum.map(&word/1)
|> Enum.join(" ")
end
@doc """
Converts English mnemonic to its binary entropy.
Validates the provided number of words, their existence in English wordlist
and finally, the checksum.
If `hex: true` option is provided, the result is hex-encoded.
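A round-trip sketch (illustrative, not a doctest):
mnemonic = Mnemo.generate(128)
Mnemo.entropy(mnemonic)             # 16-byte binary entropy
Mnemo.entropy(mnemonic, hex: true)  # the same entropy, hex-encoded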
"""
def entropy(mnemonic, opts \\ []) do
words = String.split(mnemonic)
if length(words) not in @valid_mnemonic_word_count do
raise "Number of words must be one of the following: #{inspect(@valid_mnemonic_word_count)}"
end
sentence = for(word <- words, do: <<index(word)::size(11)>>, into: "")
divider_index = floor(bit_size(sentence) / 33) * 32
<<entropy::size(divider_index), checksum::bitstring>> = sentence
ent = <<entropy::size(divider_index)>>
cs = decode_integer(checksum)
as_hex? = Keyword.get(opts, :hex, false)
case checksum(ent) do
{^cs, _} ->
if as_hex?, do: Base.encode16(ent, case: :lower), else: ent
{other, _} ->
raise "Invalid mnemonic (checksum mismatch): #{inspect(mnemonic)}. Got #{other}, expected: #{cs}"
end
end
@doc """
Retrieves English word by index.
Non-English wordlists are not implemented yet.
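For example ("abandon" is the first word of the BIP39 English wordlist):
Mnemo.word(0)           # => "abandon"
Mnemo.index("abandon")  # => 0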
"""
def word(i, lang \\ :english) when i in 0..2047 do
lang
|> wordlist_stream()
|> Stream.filter(fn {_value, index} -> index == i end)
|> Enum.at(0)
|> elem(0)
|> String.trim()
end
@doc """
Retrieves index for an English word.
Non-English wordlists are not implemented yet.
"""
def index(word, lang \\ :english) when is_binary(word) do
fetch = fn
[] -> raise "Invalid word: #{word}"
[{_word, index}] -> index
end
lang
|> wordlist_stream()
|> Stream.filter(fn {value, _index} -> String.trim(value) == word end)
|> Stream.take(1)
|> Enum.to_list()
|> fetch.()
end
@doc """
Derives a hex-encoded PBKDF2 seed from a mnemonic.
An optional passphrase can be provided in the second argument.
Does not validate any mnemonic properties.
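For example (illustrative; this mnemonic and the "TREZOR" passphrase come from the BIP39 test vectors):
Mnemo.seed("legal winner thank year wave sausage worth useful legal winner thank yellow", "TREZOR")
# => a 128-character hex string encoding the 64-byte seed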
"""
def seed(mnemonic, passphrase \\ "") do
Pbkdf2.Base.hash_password(mnemonic, "mnemonic#{passphrase}", @pbkdf2_opts)
end
@doc """
Returns a list of 11-bit word indices for given ENT_CS.
"""
def sentence(ent_cs), do: bit_chunk(ent_cs, 11)
@doc """
Decodes an unsigned big-endian integer from a bitstring. Bitstrings that are not byte-aligned are left-padded with zeros first.
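For example:
Mnemo.decode_integer(<<1, 0>>)  # => 256
Mnemo.decode_integer(<<5::3>>)  # => 5 (left-padded to a full byte first)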
"""
def decode_integer(b) when is_bitstring(b) do
b
|> pad_leading_zeros()
|> :binary.decode_unsigned(:big)
end
@doc """
Calculates CS for given ENT.
Returns a tuple consisting of the checksum and its bit size.
"""
def checksum(ent) do
s = div(bit_size(ent), 32)
{bit_slice(:crypto.hash(:sha256, ent), s), s}
end
@doc """
Left pads a bitstring with zeros.
"""
def pad_leading_zeros(bs) when is_binary(bs), do: bs
def pad_leading_zeros(bs) when is_bitstring(bs) do
pad_length = 8 - rem(bit_size(bs), 8)
<<0::size(pad_length), bs::bitstring>>
end
@doc """
Splits bitstring `b` into `n`-bit chunks.
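For example:
Mnemo.bit_chunk(<<255, 0>>, 11)
# => [2040, 0] (the first 11 bits, then the zero-padded trailing 5 bits)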
"""
def bit_chunk(b, n) when is_bitstring(b) and is_integer(n) and n > 1 do
bit_chunk(b, n, [])
end
defp bit_chunk(b, n, acc) when bit_size(b) <= n do
Enum.reverse([decode_integer(<<b::bitstring>>) | acc])
end
defp bit_chunk(b, n, acc) do
<<chunk::size(n), rest::bitstring>> = b
bit_chunk(rest, n, [decode_integer(<<chunk::size(n)>>) | acc])
end
defp bit_slice(bin, n) do
<<x::integer-size(n), _t::bitstring>> = bin
x
end
defp maybe_decode(ent) do
ent =
case Base.decode16(ent, case: :mixed) do
:error -> ent
{:ok, decoded} -> decoded
end
bit_size(ent) in @valid_strengths || raise "ENT must be #{inspect(@valid_strengths)} bits"
ent
end
defp update_with_checksum(ent) do
{checksum, checksum_size} = checksum(ent)
<<ent::binary, checksum::size(checksum_size)>>
end
defp wordlist_stream(lang) do
:mnemo
|> Application.app_dir()
|> Path.join("priv/#{lang}.txt")
|> File.stream!()
|> Stream.with_index()
end
end
|
lib/mnemo.ex
| 0.854126
| 0.585279
|
mnemo.ex
|
starcoder
|
defmodule Phoenix.LiveView.Diff do
# The diff engine is responsible for tracking the rendering state.
# Given that components are part of said state, they are also
# handled here.
@moduledoc false
alias Phoenix.LiveView.{View, Rendered, Comprehension, Component}
@components :c
@static :s
@dynamics :d
@doc """
Returns the diff component state.
"""
def new_components do
{_ids_to_state = %{}, _cids_to_id = %{}, _uuids = 0}
end
@doc """
Returns the diff fingerprint state.
"""
def new_fingerprints do
{nil, %{}}
end
@doc """
Renders a diff for the rendered struct with respect to the given socket.
"""
def render(%{fingerprints: prints} = socket, %Rendered{} = rendered, components) do
{diff, prints, pending_components, components} =
traverse(socket, rendered, prints, %{}, components)
{component_diffs, components} =
render_pending_components(socket, pending_components, %{}, components)
socket = %{socket | fingerprints: prints}
if map_size(component_diffs) == 0 do
{socket, diff, components}
else
{socket, Map.put(diff, @components, component_diffs), components}
end
end
@doc """
Executes `fun` for the component identified by `cid`, using the given `socket` as a template.
It will store the result under the `cid` key in the `component_diffs` map.
It returns the updated `component_diffs` and the updated `components` or
`:error` if the component cid does not exist.
## Example
{component_diffs, components} =
with_component(socket, cid, %{}, state.components, fn socket, component ->
case component.handle_event("...", ..., socket) do
{:noreply, socket} -> socket
end
end)
"""
def with_component(socket, cid, component_diffs, components, fun) when is_integer(cid) do
{id_to_components, cid_to_ids, _} = components
case cid_to_ids do
%{^cid => {component, _} = id} ->
{^cid, assigns, private, fingerprints} = Map.fetch!(id_to_components, id)
{pending_components, component_diffs, components} =
socket
|> configure_socket_for_component(assigns, private, fingerprints)
|> fun.(component)
|> render_component(id, cid, false, %{}, component_diffs, components)
{component_diffs, components} =
render_pending_components(socket, pending_components, component_diffs, components)
{%{@components => component_diffs}, components}
%{} ->
:error
end
end
@doc """
Deletes a component by `cid`.
"""
def delete_component(cid, {id_to_components, cid_to_ids, uuids}) do
{id, cid_to_ids} = Map.pop(cid_to_ids, cid)
{Map.delete(id_to_components, id), cid_to_ids, uuids}
end
@doc """
Converts a component to a rendered struct.
"""
def component_to_rendered(socket, component, assigns) do
socket
|> mount_component(component)
|> View.maybe_call_update!(component, assigns)
|> View.to_rendered(component)
end
## Traversal
defp traverse(
socket,
%Rendered{fingerprint: fingerprint, dynamic: dynamic},
{fingerprint, children},
pending_components,
components
) do
{_counter, diff, children, pending_components, components} =
traverse_dynamic(socket, dynamic, children, pending_components, components)
{diff, {fingerprint, children}, pending_components, components}
end
defp traverse(
socket,
%Rendered{fingerprint: fingerprint, static: static, dynamic: dynamic},
_,
pending_components,
components
) do
{_counter, diff, children, pending_components, components} =
traverse_dynamic(socket, dynamic, %{}, pending_components, components)
{Map.put(diff, @static, static), {fingerprint, children}, pending_components, components}
end
defp traverse(
socket,
%Component{id: nil, component: component, assigns: assigns},
fingerprints_tree,
pending_components,
components
) do
rendered = component_to_rendered(socket, component, assigns)
traverse(socket, rendered, fingerprints_tree, pending_components, components)
end
defp traverse(
socket,
%Component{} = component,
fingerprints_tree,
pending_components,
components
) do
{cid, pending_components, components} =
traverse_component(socket, component, pending_components, components)
{cid, fingerprints_tree, pending_components, components}
end
defp traverse(
socket,
%Comprehension{dynamics: dynamics, fingerprint: fingerprint},
fingerprint,
pending_components,
components
) do
{dynamics, {pending_components, components}} =
comprehension_to_iodata(socket, dynamics, pending_components, components)
{%{@dynamics => dynamics}, fingerprint, pending_components, components}
end
defp traverse(
socket,
%Comprehension{static: static, dynamics: dynamics, fingerprint: fingerprint},
_,
pending_components,
components
) do
{dynamics, {pending_components, components}} =
comprehension_to_iodata(socket, dynamics, pending_components, components)
{%{@dynamics => dynamics, @static => static}, fingerprint, pending_components, components}
end
defp traverse(_socket, nil, fingerprint_tree, pending_components, components) do
{nil, fingerprint_tree, pending_components, components}
end
defp traverse(_socket, iodata, _, pending_components, components) do
{IO.iodata_to_binary(iodata), nil, pending_components, components}
end
defp traverse_dynamic(socket, dynamic, children, pending_components, components) do
Enum.reduce(dynamic, {0, %{}, children, pending_components, components}, fn
entry, {counter, diff, children, pending_components, components} ->
{serialized, child_fingerprint, pending_components, components} =
traverse(socket, entry, Map.get(children, counter), pending_components, components)
diff =
if serialized do
Map.put(diff, counter, serialized)
else
diff
end
children =
if child_fingerprint do
Map.put(children, counter, child_fingerprint)
else
Map.delete(children, counter)
end
{counter + 1, diff, children, pending_components, components}
end)
end
defp comprehension_to_iodata(socket, dynamics, pending_components, components) do
Enum.map_reduce(dynamics, {pending_components, components}, fn list, acc ->
Enum.map_reduce(list, acc, fn rendered, {pending_components, components} ->
{diff, _, pending_components, components} =
traverse(socket, rendered, {nil, %{}}, pending_components, components)
{diff, {pending_components, components}}
end)
end)
end
## Stateful components helpers
defp traverse_component(
socket,
%Component{id: id, assigns: assigns, component: component},
pending_components,
components
) do
{cid, new?, components} = ensure_component(socket, {component, id}, components)
entry = {id, new?, assigns}
pending_components = Map.update(pending_components, component, [entry], &[entry | &1])
{cid, pending_components, components}
end
defp ensure_component(socket, {component, _} = id, {id_to_components, cid_to_ids, uuids}) do
case id_to_components do
%{^id => {cid, _assigns, _private, _component_prints}} ->
{cid, false, {id_to_components, cid_to_ids, uuids}}
%{} ->
cid = uuids
socket = mount_component(socket, component)
id_to_components = Map.put(id_to_components, id, dump_component(socket, cid))
cid_to_ids = Map.put(cid_to_ids, cid, id)
{cid, true, {id_to_components, cid_to_ids, uuids + 1}}
end
end
defp mount_component(socket, component) do
socket = configure_socket_for_component(socket, %{}, %{}, new_fingerprints())
View.maybe_call_mount!(socket, component, [socket])
end
defp configure_socket_for_component(socket, assigns, private, prints) do
%{
socket
| assigns: assigns,
private: private,
fingerprints: prints
}
end
defp dump_component(socket, cid) do
{cid, socket.assigns, socket.private, socket.fingerprints}
end
## Component rendering
defp render_pending_components(_, pending_components, component_diffs, components)
when map_size(pending_components) == 0 do
{component_diffs, components}
end
defp render_pending_components(socket, pending_components, component_diffs, components) do
{id_to_components, _, _} = components
acc = {%{}, component_diffs, components}
{pending_components, component_diffs, components} =
Enum.reduce(pending_components, acc, fn {component, entries}, acc ->
entries = maybe_preload_components(component, Enum.reverse(entries))
Enum.reduce(entries, acc, fn {id, new?, new_assigns}, acc ->
{pending_components, component_diffs, components} = acc
id = {component, id}
%{^id => {cid, assigns, private, component_prints}} = id_to_components
socket
|> configure_socket_for_component(assigns, private, component_prints)
|> View.maybe_call_update!(component, new_assigns)
|> render_component(id, cid, new?, pending_components, component_diffs, components)
end)
end)
render_pending_components(socket, pending_components, component_diffs, components)
end
defp maybe_preload_components(component, entries) do
if function_exported?(component, :preload, 1) do
list_of_assigns = Enum.map(entries, fn {_id, _new?, new_assigns} -> new_assigns end)
result = component.preload(list_of_assigns)
zip_preloads(result, entries, component, result)
else
entries
end
end
defp zip_preloads([new_assigns | assigns], [{id, new?, _} | entries], component, preloaded)
when is_map(new_assigns) do
[{id, new?, new_assigns} | zip_preloads(assigns, entries, component, preloaded)]
end
defp zip_preloads([], [], _component, _preloaded) do
[]
end
defp zip_preloads(_, _, component, preloaded) do
raise ArgumentError,
"expected #{inspect(component)}.preload/1 to return a list of maps of the same length " <>
"as the list of assigns given, got: #{inspect(preloaded)}"
end
defp render_component(socket, id, cid, new?, pending_components, component_diffs, components) do
{component, _} = id
{socket, pending_components, component_diffs, {id_to_components, cid_to_ids, uuids}} =
if new? or View.changed?(socket) do
rendered = View.to_rendered(socket, component)
{diff, component_prints, pending_components, components} =
traverse(socket, rendered, socket.fingerprints, pending_components, components)
socket = View.clear_changed(%{socket | fingerprints: component_prints})
{socket, pending_components, Map.put(component_diffs, cid, diff), components}
else
{socket, pending_components, component_diffs, components}
end
id_to_components = Map.put(id_to_components, id, dump_component(socket, cid))
{pending_components, component_diffs, {id_to_components, cid_to_ids, uuids}}
end
end
|
lib/phoenix_live_view/diff.ex
| 0.839767
| 0.622631
|
diff.ex
|
starcoder
|
defmodule Chunkr.Opts do
@moduledoc """
Options for paginating.
## Fields
* `:repo` — The `Ecto.Repo` for the query.
* `:planner` — The module implementing the pagination strategy.
* `:query` — The non-paginated query to be extended for pagination purposes.
* `:strategy` — The name of the pagination strategy to use.
* `:sort_dir` — The primary sort direction used for the query. Note that this
aligns with the very first `sort` clause registered in the named pagination strategy.
Any subsequent sort directions within the strategy will always be automatically
adjusted to maintain the overall strategy.
* `:paging_dir` — Either `:forward` or `:backward` depending on whether gathering
results from the start or the end of the result set (i.e. whether the limit was
specified as `:first` or `:last`).
* `:cursor` — The `:after` or `:before` cursor beyond which results are retrieved.
* `:cursor_mod` — The module implementing the `Chunkr.Cursor` behaviour to be used
for encoding/decoding cursor values. The default is `Chunkr.Cursor.Base64`, but a
custom cursor module can be provided.
* `:max_limit` — The maximum allowed page size.
* `:limit` — The requested page size (as specified by `:first` or `:last`).
"""
@type sort_dir :: :asc | :desc
@type t :: %__MODULE__{
repo: atom(),
planner: atom(),
query: Ecto.Query.t(),
strategy: atom(),
sort_dir: sort_dir(),
paging_dir: :forward | :backward,
cursor: Chunkr.Cursor.cursor() | nil,
cursor_mod: module(),
max_limit: pos_integer(),
limit: pos_integer()
}
defstruct [
:repo,
:planner,
:query,
:strategy,
:sort_dir,
:paging_dir,
:cursor,
:cursor_mod,
:max_limit,
:limit
]
@spec new(any, any, sort_dir, keyword) :: {:invalid_opts, String.t()} | {:ok, struct}
@doc """
Validates the provided options and returns a `Chunkr.Opts` struct.
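
## Example

A minimal sketch (the repo, planner, strategy, and query names are illustrative):

    Chunkr.Opts.new(query, :by_inserted_at, :desc,
      repo: MyApp.Repo,
      planner: MyApp.PaginationPlanner,
      max_limit: 100,
      cursor_mod: Chunkr.Cursor.Base64,
      first: 25
    )
    #=> {:ok, %Chunkr.Opts{limit: 25, paging_dir: :forward, ...}}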
"""
def new(query, strategy, sort_dir, opts) do
case validate_options(strategy, opts) do
{:ok, opts} -> {:ok, struct!(%__MODULE__{query: query, sort_dir: sort_dir}, opts)}
{:error, message} -> {:invalid_opts, message}
end
end
defp validate_options(strategy, opts) do
with {:ok, limit, cursor, paging_direction} <- validate(opts),
{:ok, _limit} <- validate_limit(limit, opts) do
{:ok,
%{
repo: Keyword.fetch!(opts, :repo),
planner: Keyword.fetch!(opts, :planner),
strategy: strategy,
paging_dir: paging_direction,
max_limit: Keyword.fetch!(opts, :max_limit),
limit: limit,
cursor: cursor,
cursor_mod: Keyword.fetch!(opts, :cursor_mod)
}}
end
end
@valid_keys [
[:first],
[:first, :after],
[:last],
[:last, :before]
]
@valid_sets Enum.map(@valid_keys, &MapSet.new/1)
@valid_combos @valid_keys
|> Enum.map(&Enum.join(&1, ", "))
|> Enum.map(&"[#{&1}]")
|> Enum.join(" | ")
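# => "[first] | [first, after] | [last] | [last, before]"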
defp validate(opts) do
provided_keys = opts |> Keyword.take([:first, :last, :after, :before]) |> Keyword.keys()
provided_key_set = MapSet.new(provided_keys)
case MapSet.new(@valid_sets) |> MapSet.member?(provided_key_set) do
true -> {:ok, get_limit(opts), get_cursor(opts), get_paging_direction(opts)}
false -> {:error, pagination_args_error(provided_keys)}
end
end
defp get_limit(opts) do
Keyword.get(opts, :first) || Keyword.get(opts, :last)
end
defp get_cursor(opts) do
Keyword.get(opts, :after) || Keyword.get(opts, :before)
end
defp get_paging_direction(opts) do
if Keyword.get(opts, :first), do: :forward, else: :backward
end
defp pagination_args_error(provided_keys) do
~s(Invalid pagination params: [#{Enum.join(provided_keys, ", ")}]. Valid combinations are: #{@valid_combos}.)
end
defp validate_limit(limit, opts) do
max_limit = Keyword.fetch!(opts, :max_limit)
cond do
limit < 0 ->
{:error, "Page size of #{limit} was requested, but page size must be at least 0."}
limit <= max_limit ->
{:ok, limit}
true ->
{:error, "Page size of #{limit} was requested, but maximum page size is #{max_limit}."}
end
end
end
|
lib/chunkr/opts.ex
| 0.882098
| 0.70638
|
opts.ex
|
starcoder
|
defmodule Membrane.Element.Pad do
@moduledoc """
Pads are units defined by each element, allowing it to be linked with other
elements. This module consists of pad typespecs and utils.
Each pad is described by its name, direction, availability, mode and possible caps.
For pads to be linkable, these properties have to be compatible. For more
information on each of them, check appropriate type in this module.
Each link can only consist of exactly two pads.
"""
use Bunch
use Bunch.Typespec
@typedoc """
Defines the term by which the pad instance is identified.
"""
@type ref_t :: atom | {:dynamic, atom, non_neg_integer}
@typedoc """
Defines the name of a pad or a group of dynamic pads
"""
@type name_t :: atom
@typedoc """
Defines possible pad directions:
- `:output` - data can only be sent through such pad,
- `:input` - data can only be received through such pad.
One cannot link two pads with the same direction.
"""
@type direction_t :: :output | :input
@typedoc """
Type describing possible pad modes. They are strictly related to pad directions:
- `:push` output pad - the element can send data through such pad whenever it wants.
- `:push` input pad - the element has to handle data whenever it arrives through
such pad, and do so fast enough not to let data accumulate on the pad, which
may lead to overflow of the element process's Erlang queue - a highly unwanted situation.
- `:pull` output pad - the element can send data through such pad only if it has
already received demand on the pad. Sending a small, limited amount of
undemanded data is supported and handled by `Membrane.Core.PullBuffer`.
- `:pull` input pad - the element receives through such pad only data that it has
previously demanded, so that no undemanded data can arrive.
Linking pads with different modes is possible, but only when the output pad
works in push mode and the input pad in pull mode. Moreover, the toilet mode of
`Membrane.Core.PullBuffer` has to be enabled then.
For more information on transferring data and demands, see the docs for element
callbacks in `Membrane.Element.Base.*`.
"""
@type mode_t :: :push | :pull
@typedoc """
Defines possible pad availabilities:
- `:always` - a static pad, which can remain unlinked in `stopped` state only.
- `:on_request` - a dynamic pad, instance of which is created every time it is
linked to another pad. Thus linking the pad with _k_ other pads, creates _k_
instances of the pad, and links each with another pad.
"""
@list_type availability_t :: [:always, :on_request]
@typedoc """
Type describing pad availability modes:
- `:static` - there always exists exactly one instance of such pad.
- `:dynamic` - multiple instances of such pad may be created and removed (which
entails executing `handle_pad_added` and `handle_pad_removed` callbacks,
respectively).
"""
@type availability_mode_t :: :static | :dynamic
defguard is_pad_ref(term)
when term |> is_atom or
(term |> is_tuple and term |> tuple_size == 3 and term |> elem(0) == :dynamic and
term |> elem(1) |> is_atom and term |> elem(2) |> is_integer)
defguard is_pad_name(term) when is_atom(term)
defguard is_availability(term) when term in @availability_t
defguard is_availability_dynamic(availability) when availability == :on_request
defguard is_availability_static(availability) when availability == :always
@doc """
Returns pad availability mode based on pad reference.
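
## Examples

    iex> Membrane.Element.Pad.availability_mode_by_ref({:dynamic, :input, 0})
    :dynamic
    iex> Membrane.Element.Pad.availability_mode_by_ref(:input)
    :static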
"""
@spec availability_mode_by_ref(ref_t) :: availability_mode_t
def availability_mode_by_ref({:dynamic, _name, _id}), do: :dynamic
def availability_mode_by_ref(ref) when is_atom(ref), do: :static
@doc """
Returns pad availability mode for given availability.
"""
@spec availability_mode(availability_t) :: availability_mode_t
def availability_mode(:always), do: :static
def availability_mode(:on_request), do: :dynamic
@doc """
Returns the name for the given pad reference
"""
def name_by_ref({:dynamic, name, _id}) when is_pad_name(name), do: name
def name_by_ref(ref) when is_pad_name(ref), do: ref
end
|
lib/membrane/element/pad.ex
| 0.889021
| 0.732137
|
pad.ex
|
starcoder
|
defmodule JaSerializer.DSL do
@moduledoc """
A DSL for defining JSON-API.org spec compliant payloads.
Built on top of the `JaSerializer.Serializer` behaviour.
The following macros are available:
* `location/1` - Define the url of a single serialized object.
* `attributes/1` - Define the attributes to be returned.
* `has_many/2` - Define a has_many relationship.
* `has_one/2` - Define a has_one or belongs_to relationship.
This module should always be used in conjunction with
`JaSerializer.Serializer`, see `JaSerializer` for the best way to do so.
## DSL Usage Example
defmodule PostSerializer do
use JaSerializer, dsl: true
location "/posts/:id"
attributes [:title, :body, :excerpt, :tags]
has_many :comments, links: [related: "/posts/:id/comments"]
has_one :author, serializer: PersonSerializer, include: true
def excerpt(post, _conn) do
[first | _ ] = String.split(post.body, ".")
first
end
end
post = %Post{
id: 1,
title: "jsonapi.org + Elixir = Awesome APIs",
body: "so. much. awesome.",
author: %Person{name: "Alan"}
}
post
|> PostSerializer.format
|> Poison.encode!
When `use`ing JaSerializer.DSL the default implementations of the `links/2`,
`attributes/2`, and `relationships/2` callbacks will be defined on your module.
Overriding these callbacks can be a great way to customize your serializer
beyond what the DSL provides. See `JaSerializer.Serializer` for examples.
"""
alias JaSerializer.Relationship.HasMany
alias JaSerializer.Relationship.HasOne
@doc false
defmacro __using__(_) do
quote do
@attributes []
@relations []
@location nil
import JaSerializer.DSL,
only: [
attributes: 1,
location: 1,
has_many: 2,
has_one: 2,
has_many: 1,
has_one: 1
]
unquote(define_default_attributes())
unquote(define_default_relationships())
unquote(define_default_links())
@before_compile JaSerializer.DSL
end
end
@doc false
defmacro __before_compile__(env) do
quote do
def __relations, do: @relations
def __location, do: @location
def __attributes, do: @attributes
unquote(define_inlined_attributes_map(env))
end
end
defp define_inlined_attributes_map(env) do
attributes = Module.get_attribute(env.module, :attributes)
conn = quote do: conn
struct = quote do: struct
# Construct AST for a map whose keys come from attributes, each calling the attribute fn
body =
{:%{}, [], Enum.map(attributes, fn k -> {k, {k, [], [struct, conn]}} end)}
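# e.g. `attributes [:title, :body]` yields the AST of:
#   %{title: title(struct, conn), body: body(struct, conn)}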
quote do
@compile {:inline, inlined_attributes_map: 2}
def inlined_attributes_map(unquote(struct), unquote(conn)),
do: unquote(body)
defoverridable attributes: 2
end
end
defp define_default_attributes do
quote do
@compile {:inline, attributes: 2}
def attributes(struct, conn) do
inlined_attributes_map(struct, conn)
end
defoverridable attributes: 2
end
end
defp define_default_relationships do
quote do
def relationships(struct, _conn) do
JaSerializer.DSL.default_relationships(__MODULE__)
end
defoverridable relationships: 2
end
end
@doc false
def default_relationships(serializer) do
serializer.__relations
|> Enum.map(&dsl_to_struct/1)
|> Enum.into(%{})
end
defp dsl_to_struct({:has_one, name, opts}),
do: {name, HasOne.from_dsl(name, opts)}
defp dsl_to_struct({:has_many, name, opts}),
do: {name, HasMany.from_dsl(name, opts)}
defp define_default_links do
quote do
def links(data, conn) do
JaSerializer.DSL.default_links(__MODULE__)
end
defoverridable links: 2
end
end
@doc false
def default_links(serializer) do
%{self: serializer.__location}
end
@doc """
Defines the canonical path for retrieving this resource.
## String Examples
String may be either a relative or absolute path. Path segments beginning
with a colon are called as functions on the serializer with the struct and
conn passed in.
defmodule PostSerializer do
use JaSerializer
location "/posts/:id"
end
defmodule CommentSerializer do
use JaSerializer
location "http://api.example.com/posts/:post_id/comments/:id"
def post_id(comment, _conn), do: comment.post_id
end
## Atom Example
When an atom is passed in, it is called as a function on the serializer with
the struct and conn passed in. The function should return a full path/url.
defmodule PostSerializer do
use JaSerializer
import MyPhoenixApp.Router.Helpers
location :post_url
def post_url(post, conn) do
post_path(conn, :show, post.id)
end
end
"""
defmacro location(uri) do
quote bind_quoted: [uri: uri] do
@location uri
end
end
@doc """
Defines a list of attributes as atoms to be included in the payload.
An overridable function for each attribute is generated with the same name
as the attribute. The function's default behavior is to retrieve a field with
the same name from the struct.
For example, if you have `attributes [:body]` a function `body/2` is defined
on the serializer with a default behavior of `Map.get(struct, :body)`.
defmodule PostSerializer do
use JaSerializer, dsl: true
attributes [:title, :body, :html]
def html(post, _conn) do
Earmark.to_html(post.body)
end
end
## Conditional attribute inclusion
JaSerializer supports the `fields` option as per the JSONAPI spec. This
option allows clients to request only the fields they want. For example if
you only wanted the html and the title for the post:
field_param = %{"post" => "title,html", "comment" => "html"}
# Direct Serialization
PostSerializer.format(post, conn, fields: field_param)
# via PhoenixView integrations from controller
render(conn, :show, data: post, opts: [fields: field_param])
## Further customization
Further customization of the attributes returned can be handled by overriding
the `attributes/2` callback. This can be done in conjunction with the DSL
using super, or without the DSL just returning a map.
"""
defmacro attributes(atts) when is_list(atts) do
quote bind_quoted: [atts: atts] do
# Save attributes
@attributes @attributes ++ atts
# Define default attribute function, make overridable
for att <- atts do
@compile {:inline, [{att, 1}, {att, 2}]}
def unquote(att)(m), do: Map.get(m, unquote(att))
def unquote(att)(m, c), do: unquote(att)(m)
defoverridable [{att, 2}, {att, 1}]
end
end
end
@doc """
Add a has_many relationship to be serialized.
JSONAPI.org supports three types of relationships:
* As links - Great for clients lazy loading relationships with lots of data.
* As "Resource Identifiers" - A type/id pair, useful to relate to data the client already has.
* As Included Resources - The full resource is serialized in the same request (also includes Resource Identifiers).
Links can be combined with either resource identifiers or fully included resources.
See http://jsonapi.org/format/#document-resource-object-relationships for more
details on the spec.
## Link based relationships
Specify a URI or path which responds with the related resource. For example:
defmodule MyApp.PostView do
use JaSerializer
has_many :comments, link: :comments_link
has_one :author, link: "/api/posts/:id/author"
def comments_link(post, conn) do
MyApp.Router.Helpers.post_comment_url(conn, :index, post.id)
end
end
Links can be defined with an atom or string.
String may be either a relative or absolute path. Path segments beginning
with a colon are called as functions on the serializer with the struct and
conn passed in. In the above example id/2 would be called which is defined as
a default callback.
When an atom is passed in, it is called as a function on the serializer with
the struct and conn passed in. The function should return a full path/url.
Both `related` and `self` links are supported, the default `link` creates a
related link:
defmodule PostSerializer do
use JaSerializer
has_many :comments, links: [
related: "/posts/:id/comments",
self: "/posts/:id/relationships/comments"
]
end
## Resource Identifiers (without including)
Return id and type for each related object ("Resource Identifier"). For example:
defmodule MyApp.PostView do
use JaSerializer
has_many :comments, serializer: MyApp.CommentView, include: false
has_many :tags, type: "tags"
has_one :author, type: "user", field: :created_by_id
# ...
end
When you use the `has_many` and `has_one` macros an overridable "data source"
function is defined on your module. The data source function has the same name
as the relationship name and accepts the struct and conn. The data source
function should return the related struct(s) or id(s). In the example above
the following functions are defined for you:
def comments(post, _conn), do: Map.get(post, :comments)
def tags(post, _conn), do: Map.get(post, :tags)
def author(post, _conn), do: Map.get(post, :created_by_id)
These data source functions are expected to return either related objects or
ids, by default they just access the field with the same name as the
relationship. The `field` option can be used to grab the id or struct from a
different field in the serialized object. The author is an example of
customizing this, and is frequently used when returning resource identifiers
for has_one relationships when you have the foreign key in the serialized
struct.
In the comments example when a `serializer` plus `include: false` options are
used the `id/2` and `type/2` functions are called on the defined serializer.
In the tags example where just the `type` option is used the `id` field is
automatically used on each map/struct returned by the data source.
It is important to note that when accessing the relationship fields it is
expected that the relationship is preloaded. For this reason you may want to
consider using links for has_many relationships where possible.
## Including related data
Returns a "Resource Identifier" (see above) as well as the fully serialized
object in the top level `included` key. Example:
defmodule MyApp.PostView do
use JaSerializer
has_many :comments, serializer: MyApp.CommentView, include: true, identifiers: :when_included
has_many :tags, serializer: MyApp.TagView, include: true, identifiers: :always
has_many :author, serializer: MyApp.AuthorView, include: true, field: :created_by
# ...
end
Just like when working with only Resource Identifiers this will define a
'data source' function for each relationship with an arity of two. They will
be overridable and are expected to return maps/structs.
## Conditional Inclusion
JaSerializer supports the `include` option as per the JSONAPI spec. This
option allows clients to include only the relationships they want.
JaSerializer handles the serialization of this for you; however, you will have
to handle intelligent preloading of relationships yourself.
When a relationship is not loaded via includes the `identifiers` option will
be used to determine if Resource Identifiers should be serialized or not. The
`identifiers` options accepts the atoms `:when_included` and `:always`.
When specifying the include param, only the relationships requested will be
included. For example, to only include the author and comments:
include_param = "author,comments"
# Direct Serialization
PostSerializer.format(post, conn, include: include_param)
# via PhoenixView integrations from controller
render(conn, :show, data: post, opts: [include: include_param])
## Further Customization
For further customization override the `relationships/2` callback directly.
"""
defmacro has_many(name, opts \\ []) do
normalized_opts = normalize_relation_opts(opts, __CALLER__)
quote do
@relations [
{:has_many, unquote(name), unquote(normalized_opts)} | @relations
]
unquote(JaSerializer.Relationship.default_function(name, normalized_opts))
end
end
@doc """
See the documentation for `has_many/2`.
The API is exactly the same.
"""
defmacro has_one(name, opts \\ []) do
normalized_opts = normalize_relation_opts(opts, __CALLER__)
quote do
@relations [
{:has_one, unquote(name), unquote(normalized_opts)} | @relations
]
unquote(JaSerializer.Relationship.default_function(name, normalized_opts))
end
end
defp normalize_relation_opts(opts, caller) do
include = opts[:include]
if opts[:field] && !opts[:type] do
IO.write(
:stderr,
IO.ANSI.format([
:red,
:bright,
"warning: The `field` option must be used with a `type` option\n" <>
Exception.format_stacktrace(Macro.Env.stacktrace(caller))
])
)
end
opts =
if opts[:link] do
updated =
opts
|> Keyword.get(:links, [])
|> Keyword.put_new(:related, opts[:link])
Keyword.put(opts, :links, updated)
else
opts
end
case is_boolean(include) or is_nil(include) do
true ->
opts
false ->
IO.write(
:stderr,
IO.ANSI.format([
:red,
:bright,
"warning: Specifying a non-boolean as the `include` option is " <>
"deprecated. If you are specifying the serializer for this " <>
"relation, use the `serializer` option instead. To always " <>
"side-load the relationship, use `include: true` in addition to " <>
"the `serializer` option\n" <>
Exception.format_stacktrace(Macro.Env.stacktrace(caller))
])
)
[serializer: include, include: true] ++ opts
end
end
end
|
lib/ja_serializer/dsl.ex
| 0.908236
| 0.520374
|
dsl.ex
|
starcoder
|
defmodule Quark.Partial do
@moduledoc ~S"""
Provide curried functions, that can also be partially bound without
dot notation. Partially applying a function will always return a
fully-curried function.
Please note that these will define all of the arities up to that of the defined function.
For instance:
defpartial foo(a, b, c), do: a + b + c
#=> foo/0, foo/1, foo/2, and foo/3
If you need finer control over which arities are defined, fall back to
[`defcurry/2`](Quark.Curry.html#defcurry/2) and partially apply manually.
"""
import Quark.Curry
@doc ~S"""
A convenience on [`defcurry/2`](Quark.Curry.html#defcurry/2). Generates a series of partially-bound
applications of a fully-curried function, for all arities _at and below_
the user-specified arity.
For instance:
defpartial add(a,b), do: a + b
#=> add/0, add/1, add/2.
## Examples
defmodule A do
defpartial minus(a, b, c), do: a - b - c
end
A.minus(3, 2, 1)
#=> 0
A.minus.(3).(2).(1)
#=> 0
below_ten = A.minus(10)
below_ten.(2).(1)
#=> 7
below_five = A.minus(20, 15)
below_five.(2)
#=> 3
"""
defmacro defpartial({fun_name, ctx, args}, do: body) do
quote do
defcurry unquote({fun_name, ctx, args}), do: unquote(body)
unquote do: Enum.map(args_scan(args), &rehydrate(fun_name, ctx, &1))
end
end
defp rehydrate(fun_name, ctx, args) do
quote do
def unquote({fun_name, ctx, args}) do
unquote(partial_apply(fun_name, args))
end
end
end
@doc ~S"""
`defpartial/2`, but generates private functions.
"""
defmacro defpartialp({fun_name, ctx, args}, do: body) do
quote do
defcurryp unquote({fun_name, ctx, args}), do: unquote(body)
unquote do: Enum.map(args_scan(args), &rehydratep(fun_name, ctx, &1))
end
end
defp rehydratep(fun_name, ctx, args) do
quote do
defp unquote({fun_name, ctx, args}) do
unquote(partial_apply(fun_name, args))
end
end
end
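# Builds the list of prefix argument lists, e.g. [a, b, c] becomes
# [[a], [a, b], [a, b, c]].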
defp args_scan(args), do: Enum.scan(args, [], &(&2 ++ [&1]))
defp partial_apply(fun_name, args) do
{as, [a]} = Enum.split(args, -1)
quote do
unquote(fun_name)(unquote_splicing(as)).(unquote(a))
end
end
end
|
lib/quark/partial.ex
| 0.675658
| 0.540863
|
partial.ex
|
starcoder
|
defmodule ILI9486 do
@moduledoc """
ILI9486 Elixir driver
"""
use GenServer
use Bitwise
@enforce_keys [:gpio, :opts, :lcd_spi, :data_bus, :display_mode, :chunk_size]
defstruct [
:gpio,
:opts,
:lcd_spi,
:touch_spi,
:touch_pid,
:pix_fmt,
:rotation,
:mad_mode,
:data_bus,
:display_mode,
:frame_rate,
:diva,
:rtna,
:chunk_size
]
@doc """
New connection to an ILI9486
- **port**: SPI port number
Default value: `0`
- **lcd_cs**: LCD chip-selection number
Default value: `0`.
- **touch_cs**: (Optional) Touch panel chip-selection number
Default value: `nil`.
- **touch_irq**: (Optional) Touch panel interrupt pin. Driven low while the touch panel detects a touch
Default value: `nil`.
- **touch_speed_hz**: SPI Speed for the touch panel
Default value: `50000`.
- **dc**: Command/data register selection
Default value: `24`.
- **rst**: Reset pin for ILI9486
Default value: `nil`.
- **width**: Width of display connected to ILI9486
Default value: `480`.
- **height**: Height of display connected to ILI9486
Default value: `320`.
- **offset_top**: Offset to top row
Default value: `0`.
- **offset_left**: Offset to left column
Default value: `0`.
- **speed_hz**: SPI speed (in Hz)
Default value: `16_000_000`.
- **pix_fmt**: either `:bgr565`, `:rgb565`, `:bgr666` or `:rgb666`
Default value: `:bgr565`.
- **rotation**: Screen rotation.
Default value: `90`. Only `0`, `90`, `180` and `270` are valid.
- **mad_mode**: MAD mode.
Default value: `:right_down`. Valid values: `:right_down`, `:right_up` and `:rgb_mode`
- **display_mode**: Display mode.
Default value: `:normal`. Enters normal display mode after initialization.
- **frame_rate**: Frame rate.
Default value: `70`. Valid frame rate should be one of the following:
- 28
- 30
- 32
- 34
- 36
- 39
- 42
- 46
- 50
- 56
- 62
- 70
- 81
- 96
- 117
- **diva**: Division ratio for internal clocks.
Default value: `0b00`.
- `0b00`: focs
- `0b01`: focs/2
- `0b10`: focs/4
- `0b11`: focs/8
- **rtna**: `RTNA[4:0]` is used to set 1H (line) period of Normal mode at CPU interface.
Default value: `0b10001`. Valid values range from `0b10000` (16 clocks) to `0b11111` (31 clocks), i.e.,
the clock count increases by 1 as `rtna` increases by 1.
- **is_high_speed**: Is the high speed variant?
Default value: `false`. Set `true` to make it compatible with the high speed variant. (125MHz SPI).
- **chunk_size**: batch transfer size.
Default value: `4096` for the lo-speed variant. `0x8000` for the hi-speed variant.
**return**: `%ILI9486{}`
## Example
```elixir
# default
# assuming LCD device at /dev/spidev0.0
# DC connects to PIN 24
# RST not connected
# SPI speed: 16MHz
# Pixel Format: BGR565
{:ok, disp} = ILI9486.new()
```
```elixir
# default with touch panel
# DC connects to PIN 24
# RST connects to PIN 25
# SPI speed: 16MHz
# Pixel Format: RGB666 (for demo only, not necessary)
# Touch panel device at /dev/spidev0.1
# Touch panel IRQ PIN 17
{:ok, disp} = ILI9486.new(
speed_hz: 16_000_000,
pix_fmt: :bgr666,
rst: 25,
touch_cs: 1,
touch_irq: 17
)
```
high-speed variant (125MHz SPI)
```elixir
# assuming LCD device at /dev/spidev0.0
# DC connects to PIN 24
# RST connects to PIN 25 (for demo only, not necessary)
# SPI speed: 125MHz
# Pixel Format: BGR666 (for demo only, not necessary)
{:ok, disp} = ILI9486.new(
is_high_speed: true,
speed_hz: 125_000_000,
pix_fmt: :bgr666,
rst: 25
)
```
high-speed variant (125MHz SPI) with touch panel
```elixir
# assuming LCD device at /dev/spidev0.0
# DC connects to PIN 24
# RST connects to PIN 25 (for demo only, not necessary)
# SPI speed: 125MHz
# Pixel Format: BGR666 (for demo only, not necessary)
# Touch panel device at /dev/spidev0.1
# Touch panel IRQ PIN 17
{:ok, disp} = ILI9486.new(
is_high_speed: true,
speed_hz: 125_000_000,
pix_fmt: :bgr666,
rst: 25,
touch_cs: 1,
touch_irq: 17
)
```
"""
@doc functions: :client
def new(opts \\ []) do
GenServer.start(__MODULE__, opts)
end
def new!(opts \\ []) do
{:ok, self} = GenServer.start(__MODULE__, opts)
self
end
@impl true
def init(opts) do
port = opts[:port] || 0
lcd_cs = opts[:lcd_cs] || 0
touch_cs = opts[:touch_cs]
touch_irq = opts[:touch_irq]
dc = opts[:dc] || 24
speed_hz = opts[:speed_hz] || 16_000_000
touch_speed_hz = opts[:touch_speed_hz] || 50000
width = opts[:width] || 480
height = opts[:height] || 320
offset_top = opts[:offset_top] || 0
offset_left = opts[:offset_left] || 0
rst = opts[:rst]
pix_fmt = opts[:pix_fmt] || :bgr565
rotation = opts[:rotation] || 90
mad_mode = opts[:mad_mode] || :right_down
display_mode = opts[:display_mode] || :normal
frame_rate = opts[:frame_rate] || 70
diva = opts[:diva] || 0b00
rtna = opts[:rtna] || 0b10001
is_high_speed = opts[:is_high_speed] || false
chunk_size = opts[:chunk_size]
chunk_size =
if chunk_size == nil do
if is_high_speed do
Enum.min([0x8000, Circuits.SPI.max_transfer_size()])
else
Enum.min([4096, Circuits.SPI.max_transfer_size()])
end
else
Enum.min([chunk_size, Circuits.SPI.max_transfer_size()])
end
# supported data connection
# 8-bit parallel MCU interface for low speed ones
# - Waveshare RPi 3.5 LCD (A) / Tested
# - Waveshare RPi 3.5 LCD (B)
# 16-bit parallel MCU interface for the high speed one
# - Waveshare RPi 3.5 LCD (C) / Tested
# :parallel_16bit and :parallel_8bit supported colors
# - 65K colors
# - 262K colors
data_bus = if is_high_speed, do: :parallel_16bit, else: :parallel_8bit
{:ok, lcd_spi} = _init_spi(port, lcd_cs, speed_hz)
{:ok, touch_spi} = _init_spi(port, touch_cs, touch_speed_hz)
{:ok, touch_pid} = _init_touch_irq(touch_irq)
# Set DC as output.
{:ok, gpio_dc} = Circuits.GPIO.open(dc, :output)
# Setup reset as output (if provided).
gpio_rst = _init_reset(rst)
self =
%ILI9486{
lcd_spi: lcd_spi,
touch_spi: touch_spi,
touch_pid: touch_pid,
gpio: [
dc: gpio_dc,
rst: gpio_rst
],
opts: [
port: port,
lcd_cs: lcd_cs,
touch_cs: touch_cs,
dc: dc,
speed_hz: speed_hz,
touch_speed_hz: touch_speed_hz,
width: width,
height: height,
offset_top: offset_top,
offset_left: offset_left,
rst: rst
],
pix_fmt: pix_fmt,
rotation: rotation,
mad_mode: mad_mode,
data_bus: data_bus,
display_mode: display_mode,
frame_rate: frame_rate,
diva: diva,
rtna: rtna,
chunk_size: chunk_size
}
|> _reset()
|> _init(is_high_speed)
{:ok, self}
end
@doc """
Reset the display, if reset pin is connected.
- **self**: `%ILI9486{}`
**return**: `self`
"""
@doc functions: :client
def reset(self_pid) do
GenServer.call(self_pid, :reset)
end
defp _reset(self = %ILI9486{gpio: gpio}) do
gpio_rst = gpio[:rst]
if gpio_rst != nil do
Circuits.GPIO.write(gpio_rst, 1)
:timer.sleep(500)
Circuits.GPIO.write(gpio_rst, 0)
:timer.sleep(500)
Circuits.GPIO.write(gpio_rst, 1)
:timer.sleep(500)
end
self
end
@doc """
Get screen size
- **self**: `%ILI9486{}`
**return**: `%{height: height, width: width}`
"""
@doc functions: :client
def size(self_pid) do
GenServer.call(self_pid, :size)
end
defp _size(%ILI9486{opts: opts}) do
%{height: opts[:height], width: opts[:width]}
end
@doc """
Get display pixel format
- **self**: `%ILI9486{}`
**return**: one of `:bgr565`, `:rgb565`, `:bgr666`, `:rgb666`
"""
@doc functions: :client
def pix_fmt(self_pid) do
GenServer.call(self_pid, :pix_fmt)
end
defp _pix_fmt(%ILI9486{pix_fmt: pix_fmt}) do
pix_fmt
end
@doc """
Set display pixel format
- **self**: `%ILI9486{}`
- **pix_fmt**: one of `:bgr565`, `:rgb565`, `:bgr666`, `:rgb666`
**return**: `self`
"""
@doc functions: :client
def set_pix_fmt(self_pid, pix_fmt)
when pix_fmt == :bgr565 or pix_fmt == :rgb565 or pix_fmt == :bgr666 or pix_fmt == :rgb666 do
GenServer.call(self_pid, {:set_pix_fmt, pix_fmt})
end
defp _set_pix_fmt(self = %ILI9486{}, pix_fmt)
when pix_fmt == :bgr565 or pix_fmt == :rgb565 or pix_fmt == :bgr666 or pix_fmt == :rgb666 do
%ILI9486{self | pix_fmt: pix_fmt}
|> _command(kMADCTL(), cmd_data: _mad_mode(self))
end
@doc """
Turn on/off display
- **self**: `%ILI9486{}`
- **status**: either `:on` or `:off`
**return**: `self`
"""
@doc functions: :client
def set_display(self_pid, status) when status == :on or status == :off do
GenServer.call(self_pid, {:set_display, status})
end
defp _set_display(self = %ILI9486{}, :on) do
_command(self, kDISPON())
end
defp _set_display(self = %ILI9486{}, :off) do
_command(self, kDISPOFF())
end
@doc """
Set display mode
- **self**: `%ILI9486{}`
- **display_mode**: Valid values: `:normal`, `:partial`, `:idle`
**return**: `self`
"""
@doc functions: :client
def set_display_mode(self_pid, display_mode) do
GenServer.call(self_pid, {:set_display_mode, display_mode})
end
defp _set_display_mode(self = %ILI9486{}, display_mode = :normal) do
%ILI9486{self | display_mode: display_mode}
|> _command(kNORON())
end
defp _set_display_mode(self = %ILI9486{}, display_mode = :partial) do
%ILI9486{self | display_mode: display_mode}
|> _command(kPTLON())
end
defp _set_display_mode(self = %ILI9486{}, display_mode = :idle) do
%ILI9486{self | display_mode: display_mode}
|> _command(kIDLEON())
end
@doc """
Set frame rate
- **self**: `%ILI9486{}`
- **frame_rate**: Valid value should be one of the following
- 28
- 30
- 32
- 34
- 36
- 39
- 42
- 46
- 50
- 56
- 62
- 70
- 81
- 96
- 117
**return**: `:ok` | `{:error, reason}`
"""
@doc functions: :client
def set_frame_rate(self_pid, frame_rate) do
GenServer.call(self_pid, {:set_frame_rate, frame_rate})
end
defp _set_frame_rate(
self = %ILI9486{display_mode: display_mode, diva: diva, rtna: rtna},
frame_rate
) do
index = Enum.find_index(_valid_frame_rates(display_mode), fn valid -> valid == frame_rate end)
p1 =
index
|> bsl(4)
|> bor(diva)
%ILI9486{self | frame_rate: frame_rate}
|> _command(kFRMCTR1())
|> _data(p1)
|> _data(rtna)
end
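# The index of the requested frame rate in the table below becomes the FRS
# code (0b0000..0b1111), placed in the high nibble of p1 with diva in the low bits.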
defp _valid_frame_rates(:normal) do
[28, 30, 32, 34, 36, 39, 42, 46, 50, 56, 62, 70, 81, 96, 117, 117]
end
@doc """
Write the provided 16bit BGR565/RGB565 image to the hardware.
- **self**: `%ILI9486{}`
- **image_data**: Should be 16bit BGR565/RGB565 format (same channel order as in `self`) and
  match the display dimensions (width x height, 2 bytes per pixel).
**return**: `self`
"""
@doc functions: :client
def display_565(self_pid, image_data) when is_binary(image_data) or is_list(image_data) do
GenServer.call(self_pid, {:display_565, image_data})
end
defp _display_565(self, image_data) when is_binary(image_data) do
_display_565(self, :binary.bin_to_list(image_data))
end
defp _display_565(self, image_data) when is_list(image_data) do
self
|> _set_window(x0: 0, y0: 0, x1: nil, y1: nil)
|> _send(image_data, true, false)
end
@doc """
Write the provided 18bit BGR666/RGB666 image to the hardware.
- **self**: `%ILI9486{}`
- **image_data**: Should be 18bit BGR666/RGB666 format (same channel order as in `self`) and
  match the display dimensions (width x height, 3 bytes per pixel).
**return**: `self`
"""
@doc functions: :client
def display_666(self_pid, image_data) when is_binary(image_data) or is_list(image_data) do
GenServer.call(self_pid, {:display_666, image_data})
end
defp _display_666(self, image_data) when is_binary(image_data) do
_display_666(self, :binary.bin_to_list(image_data))
end
defp _display_666(self, image_data) when is_list(image_data) do
self
|> _set_window(x0: 0, y0: 0, x1: nil, y1: nil)
|> _send(image_data, true, false)
end
@doc """
Write the provided 24bit BGR888/RGB888 image to the hardware.
- **self**: `%ILI9486{}`
- **image_data**: Should be 24bit format and the same dimensions (width x height x 3) as the display hardware.
- **source_color**: Either `:rgb888` or `:bgr888`. Indicates the channel order of the provided `image_data`.
**return**: `self`
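
## Example

A sketch assuming `disp` was returned by `new!/1` and `frame` is a
width x height x 3 binary in RGB888 order:

    ILI9486.display(disp, frame, :rgb888)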
"""
@doc functions: :client
def display(self_pid, image_data, source_color)
when is_binary(image_data) and (source_color == :rgb888 or source_color == :bgr888) do
GenServer.call(self_pid, {:display, image_data, source_color})
end
defp _display(self = %ILI9486{pix_fmt: target_color}, image_data, source_color)
when is_binary(image_data) and (source_color == :rgb888 or source_color == :bgr888) and
(target_color == :rgb565 or target_color == :bgr565) do
_display_565(self, _to_565(image_data, source_color, target_color))
end
defp _display(self = %ILI9486{pix_fmt: target_color}, image_data, source_color)
when is_binary(image_data) and (source_color == :rgb888 or source_color == :bgr888) and
(target_color == :rgb666 or target_color == :bgr666) do
_display_666(self, _to_666(image_data, source_color, target_color))
end
defp _display(self, image_data, source_color)
when is_list(image_data) and (source_color == :rgb888 or source_color == :bgr888) do
# A flat list of bytes is packed into a binary before dispatching to the
# binary clauses above.
_display(
self,
Enum.into(image_data, <<>>, fn byte -> <<byte::8>> end),
source_color
)
end
@doc """
Set touch panel callback function
- **self**: `%ILI9486{}`
- **callback**: callback function. 3 arguments: `pin`, `timestamp`, `status`
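
## Example

A minimal sketch that logs every touch event (argument meanings as described above):

    ILI9486.set_touch_callback(disp, fn pin, timestamp, status ->
      IO.puts("touch on pin #{pin} at #{timestamp}: #{inspect(status)}")
    end)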
"""
@doc functions: :client
def set_touch_callback(self_pid, callback) when is_function(callback) do
GenServer.call(self_pid, {:set_touch_callback, callback})
end
defp _set_touch_callback(self = %ILI9486{touch_pid: touch_pid}, callback)
when is_function(callback) do
GPIOIRQDevice.set_callback(touch_pid, callback)
self
end
@doc """
Write a byte to the display as command data.
- **self**: `%ILI9486{}`
- **cmd**: command data
- **opts**:
- **cmd_data**: cmd data to be sent.
Default value: `[]`. (no data will be sent)
- **delay**: wait `delay` ms after the cmd data is sent
Default value: `0`. (no wait)
**return**: `self`
"""
@doc functions: :client
def command(self_pid, cmd, opts \\ []) when is_integer(cmd) do
GenServer.call(self_pid, {:command, cmd, opts})
end
defp _command(self, cmd, opts \\ [])
defp _command(self = %ILI9486{data_bus: :parallel_8bit}, cmd, opts) when is_integer(cmd) do
cmd_data = opts[:cmd_data] || []
delay = opts[:delay] || 0
self
|> _send(cmd, false, false)
|> _data(cmd_data)
:timer.sleep(delay)
self
end
defp _command(self = %ILI9486{data_bus: :parallel_16bit}, cmd, opts) when is_integer(cmd) do
cmd_data = opts[:cmd_data] || []
delay = opts[:delay] || 0
self
|> _send(cmd, false, true)
|> _data(cmd_data)
:timer.sleep(delay)
self
end
@doc """
Write a byte or array of bytes to the display as display data.
- **self**: `%ILI9486{}`
- **data**: display data
**return**: `self`
"""
@doc functions: :client
def data(_self_pid, []), do: :ok
def data(self_pid, data) do
GenServer.call(self_pid, {:data, data})
end
defp _data(self, []), do: self
defp _data(self = %ILI9486{data_bus: :parallel_8bit}, data) do
_send(self, data, true, false)
end
defp _data(self = %ILI9486{data_bus: :parallel_16bit}, data) do
_send(self, data, true, true)
end
@doc """
Send bytes to the ILI9486
- **self**: `%ILI9486{}`
- **bytes**: The bytes to be sent to `self`
- `when is_integer(bytes)`,
`sent` will take the 8 least-significant bits `[band(bytes, 0xFF)]`
and send it to `self`
- `when is_list(bytes)`, `bytes` will be casting to bitstring and then sent
to `self`
- **is_data**:
- `true`: `bytes` will be sent as data
- `false`: `bytes` will be sent as commands
**return**: `self`
"""
@doc functions: :client
def send(self_pid, bytes, is_data)
when (is_integer(bytes) or is_list(bytes)) and is_boolean(is_data) do
GenServer.call(self_pid, {:send, bytes, is_data})
end
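# Widens each byte to a big-endian 16-bit word (high byte zero), as expected
# by the 16-bit parallel bus variant.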
defp to_be_u16(u8_bytes) do
u8_bytes
|> Enum.map(fn u8 -> [0x00, u8] end)
|> IO.iodata_to_binary()
end
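# Splits a binary into chunk_size-byte pieces (the last one may be shorter),
# keeping each SPI transfer within the driver's maximum transfer size.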
defp chunk_binary(binary, chunk_size) when is_binary(binary) do
total_bytes = byte_size(binary)
full_chunks = div(total_bytes, chunk_size)
chunks =
if full_chunks > 0 do
for i <- 0..(full_chunks - 1), reduce: [] do
acc -> [:binary.part(binary, chunk_size * i, chunk_size) | acc]
end
else
[]
end
remaining = rem(total_bytes, chunk_size)
chunks =
if remaining > 0 do
[:binary.part(binary, chunk_size * full_chunks, remaining) | chunks]
else
chunks
end
Enum.reverse(chunks)
end
defp _send(self, bytes, is_data, to_be16 \\ false)
defp _send(self = %ILI9486{}, bytes, true, to_be16) do
_send(self, bytes, 1, to_be16)
end
defp _send(self = %ILI9486{}, bytes, false, to_be16) do
_send(self, bytes, 0, to_be16)
end
defp _send(self = %ILI9486{}, bytes, is_data, to_be16)
when (is_data == 0 or is_data == 1) and is_integer(bytes) do
_send(self, <<Bitwise.band(bytes, 0xFF)>>, is_data, to_be16)
end
defp _send(self = %ILI9486{}, bytes, is_data, to_be16)
when (is_data == 0 or is_data == 1) and is_list(bytes) do
_send(self, IO.iodata_to_binary(bytes), is_data, to_be16)
end
defp _send(
self = %ILI9486{gpio: gpio, lcd_spi: spi, chunk_size: chunk_size},
bytes,
is_data,
to_be16
)
when (is_data == 0 or is_data == 1) and is_binary(bytes) do
gpio_dc = gpio[:dc]
bytes = if to_be16, do: to_be_u16(:binary.bin_to_list(bytes)), else: bytes
Circuits.GPIO.write(gpio_dc, is_data)
for xfdata <- chunk_binary(bytes, chunk_size) do
{:ok, _ret} = Circuits.SPI.transfer(spi, xfdata)
end
self
end
@impl true
def handle_call(:reset, _from, self) do
{:reply, :ok, _reset(self)}
end
@impl true
def handle_call(:size, _from, self) do
ret = _size(self)
{:reply, ret, self}
end
@impl true
def handle_call(:pix_fmt, _from, self) do
ret = _pix_fmt(self)
{:reply, ret, self}
end
@impl true
def handle_call({:set_pix_fmt, pix_fmt}, _from, self) do
{:reply, :ok, _set_pix_fmt(self, pix_fmt)}
end
@impl true
def handle_call({:set_display, status}, _from, self) do
{:reply, :ok, _set_display(self, status)}
end
@impl true
def handle_call({:set_display_mode, display_mode}, _from, self) do
{:reply, :ok, _set_display_mode(self, display_mode)}
end
@impl true
def handle_call({:set_frame_rate, frame_rate}, _from, self) do
{:reply, :ok, _set_frame_rate(self, frame_rate)}
end
@impl true
def handle_call({:display_565, image_data}, _from, self) do
{:reply, :ok, _display_565(self, image_data)}
end
@impl true
def handle_call({:display_666, image_data}, _from, self) do
{:reply, :ok, _display_666(self, image_data)}
end
@impl true
def handle_call({:display, image_data, source_color}, _from, self) do
{:reply, :ok, _display(self, image_data, source_color)}
end
@impl true
def handle_call({:set_touch_callback, callback}, _from, self) do
{:reply, :ok, _set_touch_callback(self, callback)}
end
@impl true
def handle_call({:command, cmd, opts}, _from, self) do
{:reply, :ok, _command(self, cmd, opts)}
end
@impl true
def handle_call({:data, data}, _from, self) do
{:reply, :ok, _data(self, data)}
end
@impl true
def handle_call({:send, bytes, is_data}, _from, self) do
{:reply, :ok, _send(self, bytes, is_data)}
end
defp _init_spi(_port, nil, _speed_hz), do: {:ok, nil}
defp _init_spi(port, cs, speed_hz) when cs >= 0 do
Circuits.SPI.open("spidev#{port}.#{cs}", speed_hz: speed_hz)
end
defp _init_spi(_port, _cs, _speed_hz), do: {:ok, nil}
defp _init_touch_irq(nil), do: {:ok, nil}
defp _init_touch_irq(pin) do
GenServer.start_link(GPIOIRQDevice, pin)
end
defp _init_reset(nil), do: nil
defp _init_reset(rst) when rst >= 0 do
{:ok, gpio} = Circuits.GPIO.open(rst, :output)
gpio
end
defp _init_reset(_), do: nil
defp _get_channel_order(%ILI9486{pix_fmt: :rgb565}), do: kMAD_RGB()
defp _get_channel_order(%ILI9486{pix_fmt: :bgr565}), do: kMAD_BGR()
defp _get_channel_order(%ILI9486{pix_fmt: :rgb666}), do: kMAD_RGB()
defp _get_channel_order(%ILI9486{pix_fmt: :bgr666}), do: kMAD_BGR()
defp _get_pix_fmt(%ILI9486{pix_fmt: :rgb565}), do: k16BIT_PIX()
defp _get_pix_fmt(%ILI9486{pix_fmt: :bgr565}), do: k16BIT_PIX()
defp _get_pix_fmt(%ILI9486{pix_fmt: :rgb666}), do: k18BIT_PIX()
defp _get_pix_fmt(%ILI9486{pix_fmt: :bgr666}), do: k18BIT_PIX()
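# Composes the MADCTL register value from the channel order (RGB/BGR) and the
# X/Y mirroring and row/column exchange implied by rotation and MAD mode.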
defp _mad_mode(self = %ILI9486{rotation: 0, mad_mode: :right_down}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_DOWN())
end
defp _mad_mode(self = %ILI9486{rotation: 90, mad_mode: :right_down}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_DOWN())
|> bor(kMAD_VERTICAL())
end
defp _mad_mode(self = %ILI9486{rotation: 180, mad_mode: :right_down}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_UP())
end
defp _mad_mode(self = %ILI9486{rotation: 270, mad_mode: :right_down}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_UP())
|> bor(kMAD_VERTICAL())
end
defp _mad_mode(self = %ILI9486{rotation: 0, mad_mode: :right_up}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_UP())
end
defp _mad_mode(self = %ILI9486{rotation: 90, mad_mode: :right_up}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_DOWN())
|> bor(kMAD_VERTICAL())
end
defp _mad_mode(self = %ILI9486{rotation: 180, mad_mode: :right_up}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_DOWN())
end
defp _mad_mode(self = %ILI9486{rotation: 270, mad_mode: :right_up}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_UP())
|> bor(kMAD_VERTICAL())
end
defp _mad_mode(self = %ILI9486{rotation: 0, mad_mode: :rgb_mode}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_DOWN())
end
defp _mad_mode(self = %ILI9486{rotation: 90, mad_mode: :rgb_mode}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_DOWN())
end
defp _mad_mode(self = %ILI9486{rotation: 180, mad_mode: :rgb_mode}) do
self
|> _get_channel_order()
|> bor(kMAD_X_RIGHT())
|> bor(kMAD_Y_UP())
end
defp _mad_mode(self = %ILI9486{rotation: 270, mad_mode: :rgb_mode}) do
self
|> _get_channel_order()
|> bor(kMAD_X_LEFT())
|> bor(kMAD_Y_UP())
end
defp _init(self = %ILI9486{frame_rate: frame_rate}, false) do
self
# software reset
|> _command(kSWRESET(), delay: 120)
# RGB mode off
|> _command(kRGB_INTERFACE(), cmd_data: 0x00)
# turn off sleep mode
|> _command(kSLPOUT(), delay: 200)
# interface format
|> _command(kPIXFMT(), cmd_data: _get_pix_fmt(self))
|> _command(kMADCTL(), cmd_data: _mad_mode(self))
|> _command(kPWCTR3(), cmd_data: 0x44)
|> _command(kVMCTR1())
|> _data(0x00)
|> _data(0x00)
|> _data(0x00)
|> _data(0x00)
|> _command(kGMCTRP1())
|> _data(0x0F)
|> _data(0x1F)
|> _data(0x1C)
|> _data(0x0C)
|> _data(0x0F)
|> _data(0x08)
|> _data(0x48)
|> _data(0x98)
|> _data(0x37)
|> _data(0x0A)
|> _data(0x13)
|> _data(0x04)
|> _data(0x11)
|> _data(0x0D)
|> _data(0x00)
|> _command(kGMCTRN1())
|> _data(0x0F)
|> _data(0x32)
|> _data(0x2E)
|> _data(0x0B)
|> _data(0x0D)
|> _data(0x05)
|> _data(0x47)
|> _data(0x75)
|> _data(0x37)
|> _data(0x06)
|> _data(0x10)
|> _data(0x03)
|> _data(0x24)
|> _data(0x20)
|> _data(0x00)
|> _command(kDGCTR1())
|> _data(0x0F)
|> _data(0x32)
|> _data(0x2E)
|> _data(0x0B)
|> _data(0x0D)
|> _data(0x05)
|> _data(0x47)
|> _data(0x75)
|> _data(0x37)
|> _data(0x06)
|> _data(0x10)
|> _data(0x03)
|> _data(0x24)
|> _data(0x20)
|> _data(0x00)
|> _set_display_mode(:normal)
|> _command(kINVOFF())
|> _command(kSLPOUT(), delay: 200)
|> _command(kDISPON())
|> _set_frame_rate(frame_rate)
end
defp _init(self = %ILI9486{frame_rate: frame_rate}, true) do
self
# software reset
|> _command(kSWRESET(), delay: 120)
# RGB mode off
|> _command(kRGB_INTERFACE(), cmd_data: 0x00)
# turn off sleep mode
|> _command(kSLPOUT(), delay: 250)
# interface format
|> _command(kPIXFMT(), cmd_data: _get_pix_fmt(self))
|> _command(kPWCTR3(), cmd_data: 0x44)
|> _command(kVMCTR1(), cmd_data: [0x00, 0x00, 0x00, 0x00])
|> _command(kGMCTRP1())
|> _data(0x0F)
|> _data(0x1F)
|> _data(0x1C)
|> _data(0x0C)
|> _data(0x0F)
|> _data(0x08)
|> _data(0x48)
|> _data(0x98)
|> _data(0x37)
|> _data(0x0A)
|> _data(0x13)
|> _data(0x04)
|> _data(0x11)
|> _data(0x0D)
|> _data(0x00)
|> _command(kGMCTRN1())
|> _data(0x0F)
|> _data(0x32)
|> _data(0x2E)
|> _data(0x0B)
|> _data(0x0D)
|> _data(0x05)
|> _data(0x47)
|> _data(0x75)
|> _data(0x37)
|> _data(0x06)
|> _data(0x10)
|> _data(0x03)
|> _data(0x24)
|> _data(0x20)
|> _data(0x00)
|> _command(kDGCTR1())
|> _data(0x0F)
|> _data(0x32)
|> _data(0x2E)
|> _data(0x0B)
|> _data(0x0D)
|> _data(0x05)
|> _data(0x47)
|> _data(0x75)
|> _data(0x37)
|> _data(0x06)
|> _data(0x10)
|> _data(0x03)
|> _data(0x24)
|> _data(0x20)
|> _data(0x00)
|> _set_display_mode(:normal)
|> _command(kINVOFF())
|> _command(kDISPON(), delay: 100)
|> _command(kMADCTL(), cmd_data: _mad_mode(self))
|> _set_frame_rate(frame_rate)
end
defp _set_window(self = %ILI9486{opts: board}, opts = [x0: 0, y0: 0, x1: nil, y1: nil]) do
width = board[:width]
height = board[:height]
offset_top = board[:offset_top]
offset_left = board[:offset_left]
x0 = opts[:x0]
x1 = opts[:x1] || width - 1
y0 = opts[:y0]
y1 = opts[:y1] || height - 1
y0 = y0 + offset_top
y1 = y1 + offset_top
x0 = x0 + offset_left
x1 = x1 + offset_left
self
|> _command(kCASET())
|> _data(bsr(x0, 8))
|> _data(band(x0, 0xFF))
|> _data(bsr(x1, 8))
|> _data(band(x1, 0xFF))
|> _command(kPASET())
|> _data(bsr(y0, 8))
|> _data(band(y0, 0xFF))
|> _data(bsr(y1, 8))
|> _data(band(y1, 0xFF))
|> _command(kRAMWR())
end
defp _to_565(image_data, source_color, target_color)
when is_binary(image_data) do
image_data
|> CvtColor.cvt(source_color, target_color)
|> :binary.bin_to_list()
end
defp _to_666(image_data, :bgr888, :bgr666)
when is_binary(image_data) do
image_data
|> :binary.bin_to_list()
end
defp _to_666(image_data, source_color, target_color)
when is_binary(image_data) do
image_data
|> CvtColor.cvt(source_color, target_color)
|> :binary.bin_to_list()
end
@doc functions: :constants
def kNOP, do: 0x00
@doc functions: :constants
def kSWRESET, do: 0x01
@doc functions: :constants
def kRDDID, do: 0x04
@doc functions: :constants
def kRDDST, do: 0x09
@doc functions: :constants
def kRDMODE, do: 0x0A
@doc functions: :constants
def kRDMADCTL, do: 0x0B
@doc functions: :constants
def kRDPIXFMT, do: 0x0C
@doc functions: :constants
def kRDIMGFMT, do: 0x0D
@doc functions: :constants
def kRDSELFDIAG, do: 0x0F
@doc functions: :constants
def kSLPIN, do: 0x10
@doc functions: :constants
def kSLPOUT, do: 0x11
@doc functions: :constants
def kPTLON, do: 0x12
@doc functions: :constants
def kNORON, do: 0x13
@doc functions: :constants
def kINVOFF, do: 0x20
@doc functions: :constants
def kINVON, do: 0x21
@doc functions: :constants
def kGAMMASET, do: 0x26
@doc functions: :constants
def kDISPOFF, do: 0x28
@doc functions: :constants
def kDISPON, do: 0x29
@doc functions: :constants
def kCASET, do: 0x2A
@doc functions: :constants
def kPASET, do: 0x2B
@doc functions: :constants
def kRAMWR, do: 0x2C
@doc functions: :constants
def kRAMRD, do: 0x2E
@doc functions: :constants
def kPTLAR, do: 0x30
@doc functions: :constants
def kVSCRDEF, do: 0x33
@doc functions: :constants
def kMADCTL, do: 0x36
@doc functions: :constants
# Vertical Scrolling Start Address
def kVSCRSADD, do: 0x37
@doc functions: :constants
def kIDLEOFF, do: 0x38
@doc functions: :constants
def kIDLEON, do: 0x39
@doc functions: :constants
# COLMOD: Pixel Format Set
def kPIXFMT, do: 0x3A
@doc functions: :constants
# RGB Interface Signal Control
def kRGB_INTERFACE, do: 0xB0
@doc functions: :constants
def kFRMCTR1, do: 0xB1
@doc functions: :constants
def kFRMCTR2, do: 0xB2
@doc functions: :constants
def kFRMCTR3, do: 0xB3
@doc functions: :constants
def kINVCTR, do: 0xB4
# Display Function Control
@doc functions: :constants
def kDFUNCTR, do: 0xB6
@doc functions: :constants
def kPWCTR1, do: 0xC0
@doc functions: :constants
def kPWCTR2, do: 0xC1
@doc functions: :constants
def kPWCTR3, do: 0xC2
@doc functions: :constants
def kPWCTR4, do: 0xC3
@doc functions: :constants
def kPWCTR5, do: 0xC4
@doc functions: :constants
def kVMCTR1, do: 0xC5
@doc functions: :constants
def kVMCTR2, do: 0xC7
@doc functions: :constants
def kRDID1, do: 0xDA
@doc functions: :constants
def kRDID2, do: 0xDB
@doc functions: :constants
def kRDID3, do: 0xDC
@doc functions: :constants
def kRDID4, do: 0xDD
@doc functions: :constants
def kGMCTRP1, do: 0xE0
@doc functions: :constants
def kGMCTRN1, do: 0xE1
@doc functions: :constants
def kDGCTR1, do: 0xE2
@doc functions: :constants
def kDGCTR2, do: 0xE3
@doc functions: :constants
def kMAD_RGB, do: 0x08
@doc functions: :constants
def kMAD_BGR, do: 0x00
@doc functions: :constants
def k18BIT_PIX, do: 0x66
@doc functions: :constants
def k16BIT_PIX, do: 0x55
@doc functions: :constants
def kMAD_VERTICAL, do: 0x20
@doc functions: :constants
def kMAD_X_LEFT, do: 0x00
@doc functions: :constants
def kMAD_X_RIGHT, do: 0x40
@doc functions: :constants
def kMAD_Y_UP, do: 0x80
@doc functions: :constants
def kMAD_Y_DOWN, do: 0x00
@doc functions: :constants
def kHISPEEDF1, do: 0xF1
@doc functions: :constants
def kHISPEEDF2, do: 0xF2
@doc functions: :constants
def kHISPEEDF8, do: 0xF8
@doc functions: :constants
def kHISPEEDF9, do: 0xF9
end
|
lib/ili9486_elixir.ex
| 0.889775
| 0.818809
|
ili9486_elixir.ex
|
starcoder
|
defmodule Membrane.RTP.SessionBin do
@moduledoc """
Bin handling one RTP session, which may consist of multiple incoming and outgoing RTP streams.
## Incoming streams
Incoming RTP streams can be connected via `:rtp_input` pads. As each pad can provide multiple RTP streams,
they are distinguished basing on SSRC. Once a new stream is received, bin sends `t:new_stream_notification_t/0`
notification, meaning the parent should link `Pad.ref(:output, ssrc)` pad to consuming components. The stream is
then depayloaded and forwarded via said pad.
## Outgoing streams
To create an RTP stream, the source stream needs to be connected via `Pad.ref(:input, ssrc)` pad and the sink -
via `Pad.ref(:rtp_output, ssrc)`. At least one of `:encoding` or `:payload_type` options of `:rtp_output` pad
must be provided too.
## Payloaders and depayloaders
Payloaders are Membrane elements that transform stream so that it can be put into RTP packets, while depayloaders
work the other way round. Different codecs require different payloaders and depayloaders. Thus, to send or receive
given codec via this bin, proper payloader/depayloader is needed. Payloaders and depayloaders can be found in
`membrane_rtp_X_plugin` packages, where X stands for the codec name. It's enough to add such a plugin to the
dependencies.
## RTCP
RTCP packets are received via `:rtcp_input` and sent via `:rtcp_output` pad. Only one instance of each of them
can be linked. RTCP packets should be delivered to each involved peer that supports RTCP.
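
## Example

A sketch of a parent pipeline reacting to the new stream notification. It
assumes the `{:new_rtp_stream, ssrc, payload_type}` notification shape from
`Membrane.RTP.SSRCRouter` and illustrative child names:

    @impl true
    def handle_notification({:new_rtp_stream, ssrc, _pt}, :rtp_session, _ctx, state) do
      links = [
        link(:rtp_session)
        |> via_out(Pad.ref(:output, ssrc), options: [encoding: :OPUS])
        |> to({:decoder, ssrc})
      ]

      {{:ok, spec: %ParentSpec{links: links}}, state}
    end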
"""
use Membrane.Bin
require Bitwise
require Membrane.Logger
alias Membrane.{ParentSpec, RemoteStream, RTCP, RTP, SRTCP, SRTP}
alias Membrane.RTP.{PayloadFormat, Session}
@type new_stream_notification_t :: Membrane.RTP.SSRCRouter.new_stream_notification_t()
@ssrc_boundaries 2..(Bitwise.bsl(1, 32) - 1)
@rtp_input_buffer_params [warn_size: 250, fail_size: 500]
def_options fmt_mapping: [
spec: %{RTP.payload_type_t() => {RTP.encoding_name_t(), RTP.clock_rate_t()}},
default: %{},
description: "Mapping of the custom payload types (> 95)"
],
custom_payloaders: [
spec: %{RTP.encoding_name_t() => module()},
default: %{},
description: "Mapping from encoding names to custom payloader modules"
],
custom_depayloaders: [
spec: %{RTP.encoding_name_t() => module()},
default: %{},
description: "Mapping from encoding names to custom depayloader modules"
],
rtcp_interval: [
type: :time,
default: 5 |> Membrane.Time.seconds(),
description: "Interval between sending subsequent RTCP receiver reports."
],
receiver_ssrc_generator: [
type: :function,
spec:
(local_ssrcs :: [pos_integer], remote_ssrcs :: [pos_integer] ->
ssrc :: pos_integer),
default: &__MODULE__.generate_receiver_ssrc/2,
description: """
Function generating receiver SSRCs. Default one generates random SSRC
that is not in `local_ssrcs` nor `remote_ssrcs`.
"""
],
secure?: [
type: :boolean,
default: false,
description: """
Specifies whether to use SRTP.
Requires adding [srtp](https://github.com/membraneframework/elixir_libsrtp) dependency to work.
"""
],
srtp_policies: [
spec: [ExLibSRTP.Policy.t()],
default: [],
description: """
List of SRTP policies to use for decrypting packets. Used only when `secure?` is set to `true`.
See `t:ExLibSRTP.Policy.t/0` for details.
"""
],
receiver_srtp_policies: [
spec: [ExLibSRTP.Policy.t()] | nil,
default: nil,
description: """
List of SRTP policies to use for encrypting receiver reports and other receiver RTCP packets.
Used only when `secure?` is set to `true`.
Defaults to the value of `srtp_policies`.
See `t:ExLibSRTP.Policy.t/0` for details.
"""
]
@doc false
def generate_receiver_ssrc(local_ssrcs, remote_ssrcs) do
fn -> Enum.random(@ssrc_boundaries) end
|> Stream.repeatedly()
|> Enum.find(&(&1 not in local_ssrcs and &1 not in remote_ssrcs))
end
def_input_pad :input, demand_unit: :buffers, caps: :any, availability: :on_request
def_input_pad :rtp_input,
demand_unit: :buffers,
caps: {RemoteStream, type: :packetized, content_format: one_of([nil, RTP])},
availability: :on_request
def_input_pad :rtcp_input,
demand_unit: :buffers,
caps: {RemoteStream, type: :packetized, content_format: one_of([nil, RTCP])},
availability: :on_request
def_output_pad :output,
demand_unit: :buffers,
caps: :any,
availability: :on_request,
options: [
encoding: [
spec: RTP.encoding_name_t() | nil,
default: nil,
description: """
Encoding name determining depayloader which will be used to produce output stream from RTP stream.
"""
],
clock_rate: [
spec: integer() | nil,
default: nil,
description: """
Clock rate to use. If not provided, determined from `fmt_mapping` or defaults registered by proper plugins i.e.
`Membrane.RTP.X.Plugin` where X is the name of codec corresponding to `encoding`.
"""
],
extensions: [
spec: [:vad],
default: [],
description: """
List of extensions. Currently only `:vad` is supported.
* `:vad` will turn on the Voice Activity Detection mechanism, firing appropriate notifications when needed.
Should be set only for audio tracks. For more information refer to `Membrane.RTP.VAD` module documentation.
"""
]
]
def_output_pad :rtp_output,
demand_unit: :buffers,
caps: {RemoteStream, type: :packetized, content_format: RTP},
availability: :on_request,
options: [
payload_type: [
spec: RTP.payload_type_t() | nil,
default: nil,
description: """
Payload type of output stream. If not provided, determined from `:encoding`.
"""
],
encoding: [
spec: RTP.encoding_name_t() | nil,
default: nil,
description: """
Encoding name of output stream. If not provided, determined from `:payload_type`.
"""
],
clock_rate: [
spec: integer() | nil,
default: nil,
description: """
Clock rate to use. If not provided, determined from `:payload_type`.
"""
]
]
def_output_pad :rtcp_output,
demand_unit: :buffers,
caps: {RemoteStream, type: :packetized, content_format: RTCP},
availability: :on_request
defmodule State do
@moduledoc false
use Bunch.Access
defstruct fmt_mapping: %{},
ssrc_pt_mapping: %{},
payloaders: nil,
depayloaders: nil,
ssrcs: %{},
senders_ssrcs: %MapSet{},
rtcp_interval: nil,
receiver_ssrc_generator: nil,
rtcp_report_data: %Session.ReceiverReport.Data{},
rtcp_sender_report_data: %Session.SenderReport.Data{},
secure?: nil,
srtp_policies: nil,
receiver_srtp_policies: nil
end
@impl true
def handle_init(options) do
children = [ssrc_router: RTP.SSRCRouter]
links = []
spec = %ParentSpec{children: children, links: links}
{receiver_srtp_policies, options} = Map.pop(options, :receiver_srtp_policies)
{fmt_mapping, options} = Map.pop(options, :fmt_mapping)
fmt_mapping =
Bunch.Map.map_values(fmt_mapping, fn {encoding_name, clock_rate} ->
%{encoding_name: encoding_name, clock_rate: clock_rate}
end)
state =
%State{
receiver_srtp_policies: receiver_srtp_policies || options.srtp_policies,
fmt_mapping: fmt_mapping
}
|> Map.merge(Map.from_struct(options))
{{:ok, spec: spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtp_input, ref) = pad, _ctx, %{secure?: true} = state) do
parser_ref = {:rtp_parser, ref}
decryptor_ref = {:srtp_decryptor, ref}
children = %{
parser_ref => RTP.Parser,
decryptor_ref => %SRTP.Decryptor{policies: state.srtp_policies}
}
links = [
link_bin_input(pad, buffer: @rtp_input_buffer_params)
|> to(decryptor_ref)
|> to(parser_ref)
|> to(:ssrc_router)
]
new_spec = %ParentSpec{children: children, links: links}
{{:ok, spec: new_spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtp_input, ref) = pad, _ctx, state) do
parser_ref = {:rtp_parser, ref}
children = %{parser_ref => RTP.Parser}
links = [
link_bin_input(pad, buffer: @rtp_input_buffer_params)
|> to(parser_ref)
|> to(:ssrc_router)
]
new_spec = %ParentSpec{children: children, links: links}
{{:ok, spec: new_spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtcp_input, ref) = pad, _ctx, %{secure?: true} = state) do
parser_ref = {:rtcp_parser, ref}
decryptor_ref = {:srtcp_decryptor, ref}
children = %{
parser_ref => RTCP.Parser,
decryptor_ref => %SRTCP.Decryptor{policies: state.srtp_policies}
}
links = [link_bin_input(pad) |> to(decryptor_ref) |> to(parser_ref)]
new_spec = %ParentSpec{children: children, links: links}
{{:ok, spec: new_spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtcp_input, ref) = pad, _ctx, state) do
parser_ref = {:rtcp_parser, ref}
children = [{parser_ref, RTCP.Parser}]
links = [link_bin_input(pad) |> to(parser_ref)]
new_spec = %ParentSpec{children: children, links: links}
{{:ok, spec: new_spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:output, ssrc) = pad, ctx, state) do
%{encoding: encoding_name, clock_rate: clock_rate, extensions: extensions} =
ctx.pads[pad].options
payload_type = Map.fetch!(state.ssrc_pt_mapping, ssrc)
encoding_name = encoding_name || get_from_register!(:encoding_name, payload_type, state)
clock_rate = clock_rate || get_from_register!(:clock_rate, payload_type, state)
depayloader = get_depayloader!(encoding_name, state)
rtp_stream_name = {:stream_receive_bin, ssrc}
new_children = %{
rtp_stream_name => %RTP.StreamReceiveBin{
depayloader: depayloader,
ssrc: ssrc,
clock_rate: clock_rate
}
}
new_links = [
link(:ssrc_router)
|> via_out(Pad.ref(:output, ssrc))
|> to(rtp_stream_name)
|> to_bin_output(pad)
]
{new_children, new_links} =
if extensions == [:vad] do
new_children = Map.merge(new_children, %{{:vad, ssrc} => RTP.VAD})
new_links = [
link(:ssrc_router)
|> via_out(Pad.ref(:output, ssrc))
|> to(rtp_stream_name)
|> to({:vad, ssrc})
|> to_bin_output(pad)
]
{new_children, new_links}
else
{new_children, new_links}
end
new_spec = %ParentSpec{children: new_children, links: new_links}
state = %{state | ssrcs: add_ssrc(ssrc, state.ssrcs, state.receiver_ssrc_generator)}
{{:ok, spec: new_spec}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtcp_output, _ref) = pad, _ctx, %{secure?: true} = state) do
new_children = [
srtcp_encryptor: %SRTCP.Encryptor{policies: state.receiver_srtp_policies},
rtcp_forwarder: RTCP.Forwarder
]
new_links = [link(:rtcp_forwarder) |> to(:srtcp_encryptor) |> to_bin_output(pad)]
new_spec = %ParentSpec{children: new_children, links: new_links}
{{:ok, spec: new_spec, start_timer: {:rtcp_report_timer, state.rtcp_interval}}, state}
end
@impl true
def handle_pad_added(Pad.ref(:rtcp_output, _ref) = pad, _ctx, state) do
new_children = [rtcp_forwarder: RTCP.Forwarder]
new_links = [link(:rtcp_forwarder) |> to_bin_output(pad)]
new_spec = %ParentSpec{children: new_children, links: new_links}
{{:ok, spec: new_spec, start_timer: {:rtcp_report_timer, state.rtcp_interval}}, state}
end
@impl true
def handle_pad_added(Pad.ref(name, ssrc), ctx, state)
when name in [:input, :rtp_output] do
pads_present? =
Map.has_key?(ctx.pads, Pad.ref(:input, ssrc)) and
Map.has_key?(ctx.pads, Pad.ref(:rtp_output, ssrc))
if not pads_present? or Map.has_key?(ctx.children, {:stream_send_bin, ssrc}) do
{:ok, state}
else
pad = Pad.ref(:rtp_output, ssrc)
%{encoding: encoding_name, clock_rate: clock_rate} = ctx.pads[pad].options
payload_type = get_output_payload_type!(ctx, ssrc)
encoding_name = encoding_name || get_from_register!(:encoding_name, payload_type, state)
clock_rate = clock_rate || get_from_register!(:clock_rate, payload_type, state)
payloader = get_payloader!(encoding_name, state)
spec = sent_stream_spec(ssrc, payload_type, payloader, clock_rate, state)
state = %{state | senders_ssrcs: MapSet.put(state.senders_ssrcs, ssrc)}
{{:ok, spec: spec}, state}
end
end
defp sent_stream_spec(ssrc, payload_type, payloader, clock_rate, %{
secure?: true,
srtp_policies: policies
}) do
children = %{
{:stream_send_bin, ssrc} => %RTP.StreamSendBin{
ssrc: ssrc,
payload_type: payload_type,
payloader: payloader,
clock_rate: clock_rate
},
{:srtp_encryptor, ssrc} => %SRTP.Encryptor{policies: policies}
}
links = [
link_bin_input(Pad.ref(:input, ssrc))
|> to({:stream_send_bin, ssrc})
|> to({:srtp_encryptor, ssrc})
|> to_bin_output(Pad.ref(:rtp_output, ssrc))
]
%ParentSpec{children: children, links: links}
end
defp sent_stream_spec(ssrc, payload_type, payloader, clock_rate, %{secure?: false}) do
children = %{
{:stream_send_bin, ssrc} => %RTP.StreamSendBin{
ssrc: ssrc,
payload_type: payload_type,
payloader: payloader,
clock_rate: clock_rate
}
}
links = [
link_bin_input(Pad.ref(:input, ssrc))
|> to({:stream_send_bin, ssrc})
|> to_bin_output(Pad.ref(:rtp_output, ssrc))
]
%ParentSpec{children: children, links: links}
end
@impl true
def handle_pad_removed(Pad.ref(:rtp_input, ref), _ctx, state) do
children = [rtp_parser: ref] ++ if state.secure?, do: [srtp_decryptor: ref], else: []
{{:ok, remove_child: children}, state}
end
@impl true
def handle_pad_removed(Pad.ref(:rtcp_input, ref), _ctx, state) do
children = [rtcp_parser: ref] ++ if state.secure?, do: [srtcp_decryptor: ref], else: []
{{:ok, remove_child: children}, state}
end
@impl true
def handle_pad_removed(Pad.ref(:output, ssrc), _ctx, state) do
    # TODO: the parent may not know when to unlink; we need to time out SSRCs, notify about that, and handle BYE packets over RTCP
state = %{state | ssrcs: Map.delete(state.ssrcs, ssrc)}
{{:ok, remove_child: {:stream_receive_bin, ssrc}}, state}
end
@impl true
def handle_pad_removed(Pad.ref(:rtcp_output, _ref), _ctx, state) do
{{:ok, stop_timer: :rtcp_report_timer, remove_child: :rtcp_forwarder}, state}
end
@impl true
def handle_pad_removed(Pad.ref(name, ssrc), ctx, state)
when name in [:input, :rtp_output] do
case Map.fetch(ctx.children, {:stream_send_bin, ssrc}) do
{:ok, %{terminating?: false}} ->
state = %{state | senders_ssrcs: MapSet.delete(state.senders_ssrcs, ssrc)}
{{:ok, remove_child: {:stream_send_bin, ssrc}}, state}
_result ->
{:ok, state}
end
end
@impl true
def handle_tick(:rtcp_report_timer, _ctx, state) do
{maybe_receiver_report, report_data} =
Session.ReceiverReport.flush_report(state.rtcp_report_data)
{remote_ssrcs, report_data} = Session.ReceiverReport.init_report(state.ssrcs, report_data)
{maybe_sender_report, sender_report_data} =
Session.SenderReport.flush_report(state.rtcp_sender_report_data)
{senders_ssrcs, sender_report_data} =
Session.SenderReport.init_report(state.senders_ssrcs, sender_report_data)
sender_stats_requests = Enum.map(senders_ssrcs, &{{:stream_send_bin, &1}, :send_stats})
receiver_stats_requests = Enum.map(remote_ssrcs, &{{:stream_receive_bin, &1}, :send_stats})
receiver_report_messages =
case maybe_receiver_report do
{:report, report} -> [rtcp_forwarder: {:report, report}]
:no_report -> []
end
sender_report_messages =
case maybe_sender_report do
{:report, report} -> [rtcp_forwarder: {:report, report}]
:no_report -> []
end
actions =
Enum.map(
receiver_report_messages ++
receiver_stats_requests ++
sender_report_messages ++
sender_stats_requests,
&{:forward, &1}
)
{{:ok, actions},
%{state | rtcp_report_data: report_data, rtcp_sender_report_data: sender_report_data}}
end
@impl true
def handle_notification({:new_rtp_stream, ssrc, payload_type}, :ssrc_router, _ctx, state) do
state = put_in(state.ssrc_pt_mapping[ssrc], payload_type)
{{:ok, notify: {:new_rtp_stream, ssrc, payload_type}}, state}
end
@impl true
def handle_notification({:received_rtcp, rtcp, timestamp}, {:rtcp_parser, _ref}, _ctx, state) do
report_data =
Session.ReceiverReport.handle_remote_report(rtcp, timestamp, state.rtcp_report_data)
{:ok, %{state | rtcp_report_data: report_data}}
end
@impl true
def handle_notification(
{:serializer_stats, stats},
{:stream_send_bin, sender_ssrc},
ctx,
state
) do
{result, report_data} =
Session.SenderReport.handle_stats(stats, sender_ssrc, state.rtcp_sender_report_data)
{{:ok, forward_action(result, ctx)}, %{state | rtcp_sender_report_data: report_data}}
end
@impl true
def handle_notification(
{:jitter_buffer_stats, stats},
{:stream_receive_bin, remote_ssrc},
ctx,
state
) do
{result, report_data} =
Session.ReceiverReport.handle_stats(stats, remote_ssrc, state.ssrcs, state.rtcp_report_data)
{{:ok, forward_action(result, ctx)}, %{state | rtcp_report_data: report_data}}
end
@impl true
def handle_notification({:vad, _val} = msg, _from, _ctx, state) do
{{:ok, notify: msg}, state}
end
defp forward_action(result, ctx) do
with {:report, report} <- result,
true <- Map.has_key?(ctx.children, :rtcp_forwarder) do
[forward: {:rtcp_forwarder, {:report, report}}]
else
_ -> []
end
end
defp add_ssrc(remote_ssrc, ssrcs, generator) do
local_ssrc = generator.([remote_ssrc | Map.keys(ssrcs)], Map.values(ssrcs))
Map.put(ssrcs, remote_ssrc, local_ssrc)
end
defp get_from_register!(field, pt, state) do
pt_mapping = get_payload_type_mapping!(pt, state)
Map.fetch!(pt_mapping, field)
end
defp get_payload_type_mapping!(payload_type, state) do
pt_mapping =
PayloadFormat.get_payload_type_mapping(payload_type)
|> Map.merge(state.fmt_mapping[payload_type] || %{})
if Map.has_key?(pt_mapping, :encoding_name) and Map.has_key?(pt_mapping, :clock_rate) do
pt_mapping
else
raise "Unknown RTP payload type #{payload_type}"
end
end
defp get_payloader!(encoding_name, state) do
case state.custom_payloaders[encoding_name] || PayloadFormat.get(encoding_name).payloader do
nil -> raise "Cannot find payloader for encoding #{encoding_name}"
payloader -> payloader
end
end
defp get_depayloader!(encoding_name, state) do
case state.custom_depayloaders[encoding_name] || PayloadFormat.get(encoding_name).depayloader do
nil -> raise "Cannot find depayloader for encoding #{encoding_name}"
depayloader -> depayloader
end
end
defp get_output_payload_type!(ctx, ssrc) do
pad = Pad.ref(:rtp_output, ssrc)
%{payload_type: pt, encoding: encoding} = ctx.pads[pad].options
unless pt || encoding do
raise "Neither payload_type nor encoding specified for #{inspect(pad)})"
end
pt || PayloadFormat.get(encoding).payload_type ||
raise "Cannot find default RTP payload type for encoding #{encoding}"
end
end
|
lib/membrane/rtp/session_bin.ex
| 0.830422
| 0.515681
|
session_bin.ex
|
starcoder
|
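The `:receiver_ssrc_generator` option of the session bin above accepts any two-arity function returning a free SSRC. A minimal sketch of a custom, deterministic generator matching that contract — the module name `DeterministicSSRC` is hypothetical, and the range mirrors the bin's own `@ssrc_boundaries`:

defmodule DeterministicSSRC do
  # Picks the lowest free SSRC in the same 2..(2^32 - 1) range used by the bin.
  def generate(local_ssrcs, remote_ssrcs) do
    taken = MapSet.new(local_ssrcs ++ remote_ssrcs)
    Enum.find(2..4_294_967_295, &(not MapSet.member?(taken, &1)))
  end
end

# Passed to the bin as: receiver_ssrc_generator: &DeterministicSSRC.generate/2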
defmodule Exdis.CommandParsers.Util do
## ------------------------------------------------------------------
## RESP Type Coercion - To String
## ------------------------------------------------------------------
def maybe_coerce_into_string({:string, string}) do
{:ok, string}
end
def maybe_coerce_into_string({:integer, integer}) do
{:ok, Exdis.Int64.to_decimal_string(integer)}
end
def maybe_coerce_into_string(_) do
{:error, :unsupported_conversion}
end
## ------------------------------------------------------------------
## RESP Type Coercion - To Int64
## ------------------------------------------------------------------
def maybe_coerce_into_int64({:integer, integer}) do
{:ok, Exdis.Int64.new(integer)}
end
def maybe_coerce_into_int64({:string, string}) do
Exdis.Int64.from_decimal_string(string)
end
def maybe_coerce_into_int64(_) do
{:error, :unsupported_conversion}
end
## ------------------------------------------------------------------
## RESP Type Coercion - To Float
## ------------------------------------------------------------------
def maybe_coerce_into_float({:string, string}) do
Exdis.Float.from_decimal_string(string)
end
def maybe_coerce_into_float({:integer, integer}) do
Exdis.Float.from_integer(integer)
end
def maybe_coerce_into_float(_) do
{:error, :unsupported_conversion}
end
## ------------------------------------------------------------------
## RESP Type Coercion - To Bit
## ------------------------------------------------------------------
def maybe_coerce_into_bit({:integer, integer}) do
case integer in [0, 1] do
true ->
{:ok, integer}
false ->
{:error, {:integer_value_not_in_range, integer}}
end
end
def maybe_coerce_into_bit({:string, string}) do
case Exdis.Int64.from_decimal_string(string) do
{:ok, integer} ->
maybe_coerce_into_bit({:integer, integer})
{:error, reason} ->
{:error, reason}
end
end
def maybe_coerce_into_bit(_) do
{:error, :unsupported_conversion}
end
## ------------------------------------------------------------------
## Variadic Argument Helpers: Parsing String Lists
## ------------------------------------------------------------------
def parse_string_list(list, opts \\ []) do
parse_string_list_recur(list, opts, [])
end
defp parse_string_list_recur([{:string, string} | next], opts, acc) do
acc = [string | acc]
parse_string_list_recur(next, opts, acc)
end
defp parse_string_list_recur([], opts, acc) do
cond do
(:non_empty in opts) and (acc === []) ->
{:error, :empty_list}
:unique in opts ->
{:ok, Enum.uniq(acc)}
:unstable in opts ->
{:ok, acc}
true ->
{:ok, Enum.reverse(acc)}
end
end
## ------------------------------------------------------------------
## Variadic Argument Helpers: Parsing Key-Value Lists
## ------------------------------------------------------------------
def parse_and_unzip_kvlist(list, opts \\ []) do
parse_and_unzip_kvlist_recur(list, opts, [], [])
end
defp parse_and_unzip_kvlist_recur(
[{:string, key_name}, resp_value | next], opts, key_names_acc, values_acc)
do
case maybe_coerce_into_string(resp_value) do
{:ok, value} ->
key_names_acc = [key_name | key_names_acc]
values_acc = [value | values_acc]
parse_and_unzip_kvlist_recur(next, opts, key_names_acc, values_acc)
{:error, reason} ->
{:error, {:value_not_string, %{value: resp_value, reason: reason}}}
end
end
defp parse_and_unzip_kvlist_recur([], opts, key_names_acc, values_acc) do
cond do
(:non_empty in opts) and (key_names_acc === []) ->
{:error, :empty_list}
:unique in opts ->
pairs = Enum.zip(key_names_acc, values_acc)
unique_pairs = :lists.ukeysort(1, pairs)
{keys, values} = Enum.unzip(unique_pairs)
{:ok, keys, values}
:unstable in opts ->
{:ok, key_names_acc, values_acc}
true ->
key_names = Enum.reverse(key_names_acc)
values = Enum.reverse(values_acc)
{:ok, key_names, values}
end
end
defp parse_and_unzip_kvlist_recur([unpaired_entry], _opts, _key_names_acc, _values_acc) do
{:error, {:unpaired_entry, unpaired_entry}}
end
end
|
lib/exdis/command_parsers/util.ex
| 0.533884
| 0.519582
|
util.ex
|
starcoder
|
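A usage sketch for the coercion helpers above; values follow the `{:string, _}` / `{:integer, _}` RESP tuple shape the module pattern-matches on:

{:ok, "42"} = Exdis.CommandParsers.Util.maybe_coerce_into_string({:string, "42"})
{:ok, 1} = Exdis.CommandParsers.Util.maybe_coerce_into_bit({:integer, 1})
{:error, {:integer_value_not_in_range, 7}} =
  Exdis.CommandParsers.Util.maybe_coerce_into_bit({:integer, 7})
# Default opts preserve order; :unique / :non_empty / :unstable alter the result.
{:ok, ["a", "b"]} =
  Exdis.CommandParsers.Util.parse_string_list([string: "a", string: "b"])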
defmodule WechatPay do
@moduledoc """
  WechatPay provides a toolkit for the Wechat Payment Platform.
### Setup
  You need to define your own pay module, then `use` WechatPay:
```elixir
defmodule MyPay do
use WechatPay, otp_app: :my_app
end
```
Then config your app in `config/config.exs`:
```elixir
config :my_app, MyPay,
appid: "the-appid",
mch_id: "the-mch-id",
apikey: "the-apikey",
ssl_cacert: File.read!("fixture/certs/rootca.pem"),
ssl_cert: File.read!("fixture/certs/apiclient_cert.pem"),
ssl_key: File.read!("fixture/certs/apiclient_key.pem")
```
If this does not fit your needs, you might want to check [Configuration](configuration.html).
  > NOTE: WechatPay provides `Mix.Tasks.WechatPay.GetSandboxSignkey` to
> fetch the Sandbox API Key.
### Payment methods
  When you `use` WechatPay in the `MyPay` module, it will generate the following
  modules for you:
- `MyPay.App` - Implements the `WechatPay.App.Behaviour` behaviour
- `MyPay.JSAPI` - Implements the `WechatPay.JSAPI.Behaviour` behaviour
- `MyPay.Native` - Implements the `WechatPay.Native.Behaviour` behaviour
### Plug
WechatPay will also generate some [Plugs](https://github.com/elixir-plug/plug) to
simplify the process of handling notification from Wechat's Payment Gateway:
- `MyPay.Plug.Payment` - Implements the `WechatPay.Plug.Payment` behaviour
- `MyPay.Plug.Refund` - Implements the `WechatPay.Plug.Refund` behaviour
### JSON Encoder
  By default, `WechatPay` uses `Jason` to encode JSON; if you want to use `Poison`,
you can configure `:wechat_pay` application with:
```elixir
config :wechat_pay, :json_library, Poison
```
"""
alias WechatPay.Config
defmacro __using__(opts) do
opts =
opts
|> Enum.into(%{})
config_ast =
case opts do
%{otp_app: otp_app} ->
quote do
@behaviour Config
def config do
unquote(otp_app)
|> Application.fetch_env!(__MODULE__)
|> Config.new()
end
end
_ ->
quote do
@behaviour Config
end
end
module_ast =
quote do
# define module `MyModule.App`
__MODULE__
|> Module.concat(:App)
|> Module.create(
quote do
use WechatPay.App, unquote(__MODULE__)
end,
Macro.Env.location(__ENV__)
)
# define module `MyModule.JSAPI`
__MODULE__
|> Module.concat(:JSAPI)
|> Module.create(
quote do
use WechatPay.JSAPI, unquote(__MODULE__)
end,
Macro.Env.location(__ENV__)
)
# define module `MyModule.Native`
__MODULE__
|> Module.concat(:Native)
|> Module.create(
quote do
use WechatPay.Native, unquote(__MODULE__)
end,
Macro.Env.location(__ENV__)
)
# define module `MyModule.Plug.Payment` & `MyModule.Plug.Refund`
if Code.ensure_loaded?(Plug) do
[__MODULE__, :Plug, :Payment]
|> Module.concat()
|> Module.create(
quote do
use WechatPay.Plug.Payment, unquote(__MODULE__)
end,
Macro.Env.location(__ENV__)
)
[__MODULE__, :Plug, :Refund]
|> Module.concat()
|> Module.create(
quote do
use WechatPay.Plug.Refund, unquote(__MODULE__)
end,
Macro.Env.location(__ENV__)
)
end
end
[config_ast, module_ast]
end
end
|
lib/wechat_pay.ex
| 0.752649
| 0.624694
|
wechat_pay.ex
|
starcoder
|
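When `use WechatPay` is invoked without the `:otp_app` option, the macro above only injects `@behaviour Config`, so the pay module must supply its own `config/0`. A minimal sketch under that assumption (field values are placeholders):

defmodule MyPay do
  use WechatPay

  @impl true
  def config do
    # Build the config struct directly instead of reading the app env.
    WechatPay.Config.new(
      appid: "the-appid",
      mch_id: "the-mch-id",
      apikey: "the-apikey"
    )
  end
end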
defmodule AWS.Cognito.Sync do
@moduledoc """
Amazon Cognito Sync
Amazon Cognito Sync provides an AWS service and client library that enable
cross-device syncing of application-related user data. High-level client
libraries are available for both iOS and Android. You can use these
libraries to persist data locally so that it's available even if the device
is offline. Developer credentials don't need to be stored on the mobile
device to access the service. You can use Amazon Cognito to obtain a
normalized user ID and credentials. User data is persisted in a dataset
that can store up to 1 MB of key-value pairs, and you can have up to 20
datasets per user identity.
With Amazon Cognito Sync, the data stored for each identity is accessible
only to credentials assigned to that identity. In order to use the Cognito
Sync service, you need to make API calls using credentials retrieved with
[Amazon Cognito Identity
service](http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/Welcome.html).
If you want to use Cognito Sync in an Android or iOS application, you will
probably want to make API calls via the AWS Mobile SDK. To learn more, see
the [Developer Guide for
Android](http://docs.aws.amazon.com/mobile/sdkforandroid/developerguide/cognito-sync.html)
and the [Developer Guide for
iOS](http://docs.aws.amazon.com/mobile/sdkforios/developerguide/cognito-sync.html).
"""
@doc """
Initiates a bulk publish of all existing datasets for an Identity Pool to
the configured stream. Customers are limited to one successful bulk publish
  per 24 hours. Bulk publish is an asynchronous request; customers can see
the status of the request via the GetBulkPublishDetails operation.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def bulk_publish(client, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/bulkpublish"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Deletes the specific dataset. The dataset will be deleted permanently, and
the action can't be undone. Datasets that this dataset was merged with will
no longer report the merge. Any subsequent operation on this dataset will
result in a ResourceNotFoundException.
This API can be called with temporary user credentials provided by Cognito
Identity or with developer credentials.
"""
def delete_dataset(client, dataset_name, identity_id, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}"
headers = []
request(client, :delete, url, headers, input, options, 200)
end
@doc """
Gets meta data about a dataset by identity and dataset name. With Amazon
Cognito Sync, each identity has access only to its own data. Thus, the
credentials used to make this API call need to have access to the identity
data.
This API can be called with temporary user credentials provided by Cognito
Identity or with developer credentials. You should use Cognito Identity
credentials to make this API call.
"""
def describe_dataset(client, dataset_name, identity_id, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Gets usage details (for example, data storage) about a particular identity
pool.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def describe_identity_pool_usage(client, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Gets usage information for an identity, including number of datasets and
data usage.
This API can be called with temporary user credentials provided by Cognito
Identity or with developer credentials.
"""
def describe_identity_usage(client, identity_id, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Get the status of the last BulkPublish operation for an identity pool.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def get_bulk_publish_details(client, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/getBulkPublishDetails"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Gets the events and the corresponding Lambda functions associated with an
identity pool.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def get_cognito_events(client, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/events"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Gets the configuration settings of an identity pool.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def get_identity_pool_configuration(client, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/configuration"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Lists datasets for an identity. With Amazon Cognito Sync, each identity has
access only to its own data. Thus, the credentials used to make this API
call need to have access to the identity data.
ListDatasets can be called with temporary user credentials provided by
Cognito Identity or with developer credentials. You should use the Cognito
Identity credentials to make this API call.
"""
def list_datasets(client, identity_id, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Gets a list of identity pools registered with Cognito.
ListIdentityPoolUsage can only be called with developer credentials. You
cannot make this API call with the temporary user credentials provided by
Cognito Identity.
"""
def list_identity_pool_usage(client, options \\ []) do
url = "/identitypools"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Gets paginated records, optionally changed after a particular sync count
for a dataset and identity. With Amazon Cognito Sync, each identity has
access only to its own data. Thus, the credentials used to make this API
call need to have access to the identity data.
ListRecords can be called with temporary user credentials provided by
Cognito Identity or with developer credentials. You should use Cognito
Identity credentials to make this API call.
"""
def list_records(client, dataset_name, identity_id, identity_pool_id, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}/records"
headers = []
request(client, :get, url, headers, nil, options, 200)
end
@doc """
Registers a device to receive push sync notifications.
This API can only be called with temporary credentials provided by Cognito
Identity. You cannot call this API with developer credentials.
"""
def register_device(client, identity_id, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identity/#{URI.encode(identity_id)}/device"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Sets the AWS Lambda function for a given event type for an identity pool.
  This request only updates the key/value pair specified. Other key/value
  pairs are not updated. To remove a key/value pair, pass an empty value for
the particular key.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def set_cognito_events(client, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/events"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Sets the necessary configuration for push sync.
This API can only be called with developer credentials. You cannot call
this API with the temporary user credentials provided by Cognito Identity.
"""
def set_identity_pool_configuration(client, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/configuration"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Subscribes to receive notifications when a dataset is modified by another
device.
This API can only be called with temporary credentials provided by Cognito
Identity. You cannot call this API with developer credentials.
"""
def subscribe_to_dataset(client, dataset_name, device_id, identity_id, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}/subscriptions/#{URI.encode(device_id)}"
headers = []
request(client, :post, url, headers, input, options, 200)
end
@doc """
Unsubscribes from receiving notifications when a dataset is modified by
another device.
This API can only be called with temporary credentials provided by Cognito
Identity. You cannot call this API with developer credentials.
"""
def unsubscribe_from_dataset(client, dataset_name, device_id, identity_id, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}/subscriptions/#{URI.encode(device_id)}"
headers = []
request(client, :delete, url, headers, input, options, 200)
end
@doc """
Posts updates to records and adds and deletes records for a dataset and
user.
The sync count in the record patch is your last known sync count for that
record. The server will reject an UpdateRecords request with a
ResourceConflictException if you try to patch a record with a new value but
a stale sync count.
For example, if the sync count on the server is 5 for a key called
highScore and you try and submit a new highScore with sync count of 4, the
request will be rejected. To obtain the current sync count for a record,
call ListRecords. On a successful update of the record, the response
returns the new sync count for that record. You should present that sync
count the next time you try to update that same record. When the record
does not exist, specify the sync count as 0.
This API can be called with temporary user credentials provided by Cognito
Identity or with developer credentials.
"""
def update_records(client, dataset_name, identity_id, identity_pool_id, input, options \\ []) do
url = "/identitypools/#{URI.encode(identity_pool_id)}/identities/#{URI.encode(identity_id)}/datasets/#{URI.encode(dataset_name)}"
headers = []
    {headers, input} =
      if Map.has_key?(input, "ClientContext") do
        {[{"x-amz-Client-Context", input["ClientContext"]} | headers],
         Map.delete(input, "ClientContext")}
      else
        {headers, input}
      end
    request(client, :post, url, headers, input, options, 200)
end
defp request(client, method, url, headers, input, options, success_status_code) do
client = %{client | service: "cognito-sync"}
host = get_host("cognito-sync", client)
url = get_url(host, url, client)
headers = Enum.concat([{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"}],
headers)
payload = encode_payload(input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(method, url, payload, headers, options, success_status_code)
end
defp perform_request(method, url, payload, headers, options, nil) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
{:ok, response}
{:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, response=%HTTPoison.Response{status_code: 202, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, response=%HTTPoison.Response{status_code: 204, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
reason = Poison.Parser.parse!(body)["message"]
{:error, reason}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp perform_request(method, url, payload, headers, options, success_status_code) do
case HTTPoison.request(method, url, payload, headers, options) do
{:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: ""}} ->
{:ok, nil, response}
{:ok, response=%HTTPoison.Response{status_code: ^success_status_code, body: body}} ->
{:ok, Poison.Parser.parse!(body), response}
{:ok, _response=%HTTPoison.Response{body: body}} ->
reason = Poison.Parser.parse!(body)["message"]
{:error, reason}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, url, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{url}/"
end
defp encode_payload(input) do
if input != nil do
Poison.Encoder.encode(input, [])
else
""
end
end
end
|
lib/aws/cognito_sync.ex
| 0.798462
| 0.530601
|
cognito_sync.ex
|
starcoder
|
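A hedged call sketch for the module above. The private helpers show the client map needs at least `:region`, `:endpoint`, `:proto` and `:port`; the credential keys consumed by `AWS.Request.sign_v4/5` are assumptions here:

client = %{
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: 443,
  # Assumed credential fields read by AWS.Request.sign_v4/5:
  access_key_id: "AKID-placeholder",
  secret_access_key: "secret-placeholder"
}

{:ok, records, _http_response} =
  AWS.Cognito.Sync.list_records(client, "highscores", "identity-id", "identity-pool-id")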
defmodule FastSanitize.Fragment do
@moduledoc "Processing of HTML fragment trees."
import Plug.HTML, only: [html_escape_to_iodata: 1]
def to_tree(bin) do
with {:ok, fragment} <-
:fast_html.decode_fragment(bin,
format: [:nil_self_closing, :comment_tuple3, :html_atoms]
) do
{:ok, fragment}
else
e ->
{:error, e}
end
end
defp build_attr_chunks([]), do: ""
defp build_attr_chunks(attrs) do
List.foldr(attrs, [], fn {k, v}, iodata ->
[[" ", html_escape_to_iodata(k), "=\"", html_escape_to_iodata(v), "\""] | iodata]
end)
end
defp build_self_closing_tag(tag, attrs),
do: ["<", to_string(tag), build_attr_chunks(attrs), "/>"]
defp build_start_tag(tag, []),
do: ["<", to_string(tag), ">"]
defp build_start_tag(tag, attrs),
do: ["<", to_string(tag), build_attr_chunks(attrs), ">"]
# text node
defp fragment_to_html("" <> _ = text, _), do: html_escape_to_iodata(text)
# empty tuple - fragment was clobbered, return nothing
defp fragment_to_html(nil, _), do: ""
defp fragment_to_html({}, _), do: ""
# comment node
defp fragment_to_html({:comment, _, text}, _), do: ["<!--", text, "-->"]
# a node which can never accept children will have nil instead of a subtree
defp fragment_to_html({tag, attrs, nil}, _), do: build_self_closing_tag(tag, attrs)
# every other case, assume a subtree
defp fragment_to_html({tag, attrs, subtree}, scrubber) do
start_tag = build_start_tag(tag, attrs)
subtree = subtree_to_iodata(subtree, scrubber)
[start_tag, subtree, "</", to_string(tag), ">"]
end
# bare subtree
defp fragment_to_html([], _), do: ""
defp fragment_to_html([_head | _tail] = subtree, scrubber) do
subtree_to_iodata(subtree, scrubber)
end
defp subtree_to_html([], _), do: {:ok, ""}
defp subtree_to_html(tree, scrubber) do
iodata = subtree_to_iodata(tree, scrubber)
rendered = :erlang.iolist_to_binary(iodata)
{:ok, rendered}
end
defp subtree_to_iodata(tree, scrubber) do
List.foldr(tree, [], fn node, iodata ->
[fragment_to_html(scrubber.scrub(node), scrubber) | iodata]
end)
end
def to_html(tree, scrubber \\ FastSanitize.Sanitizer.Dummy),
do: subtree_to_html(tree, scrubber)
end
|
lib/fast_sanitize/fragment.ex
| 0.66454
| 0.433502
|
fragment.ex
|
starcoder
|
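A round-trip sketch for the functions above, using the default `FastSanitize.Sanitizer.Dummy` scrubber (assumed to pass nodes through unchanged); the exact output depends on how `:fast_html` parses the fragment:

{:ok, tree} = FastSanitize.Fragment.to_tree("<b>hi</b><!-- note -->")
{:ok, html} = FastSanitize.Fragment.to_html(tree)
# html is the re-rendered fragment, e.g. "<b>hi</b><!-- note -->"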
defmodule Stream.Reducers do
# Collection of reducers shared by Enum and Stream.
@moduledoc false
defmacro chunk(amount, step, limit, fun \\ nil) do
quote do
fn entry, acc(head, {buffer, count}, tail) ->
buffer = [entry | buffer]
count = count + 1
new_state =
if count >= unquote(limit) do
left = count - unquote(step)
{Enum.take(buffer, left), left}
else
{buffer, count}
end
if count == unquote(amount) do
next_with_acc(unquote(fun), :lists.reverse(buffer), head, new_state, tail)
else
skip(acc(head, new_state, tail))
end
end
end
end
defmacro chunk_by(callback, fun \\ nil) do
quote do
fn
entry, acc(head, {buffer, value}, tail) ->
new_value = unquote(callback).(entry)
if new_value == value do
skip(acc(head, {[entry | buffer], value}, tail))
else
next_with_acc(unquote(fun), :lists.reverse(buffer), head, {[entry], new_value}, tail)
end
entry, acc(head, nil, tail) ->
skip(acc(head, {[entry], unquote(callback).(entry)}, tail))
end
end
end
defmacro dedup(callback, fun \\ nil) do
quote do
fn(entry, acc(head, prev, tail) = acc) ->
value = unquote(callback).(entry)
case prev do
{:value, ^value} -> skip(acc)
_ -> next_with_acc(unquote(fun), entry, head, {:value, value}, tail)
end
end
end
end
defmacro drop(fun \\ nil) do
quote do
fn
_entry, acc(head, amount, tail) when amount > 0 ->
skip(acc(head, amount - 1, tail))
entry, acc(head, amount, tail) ->
next_with_acc(unquote(fun), entry, head, amount, tail)
end
end
end
defmacro drop_every(nth, fun \\ nil) do
quote do
fn
entry, acc(head, curr, tail) when curr in [unquote(nth), :first] ->
skip(acc(head, 1, tail))
entry, acc(head, curr, tail) ->
next_with_acc(unquote(fun), entry, head, curr + 1, tail)
end
end
end
defmacro drop_while(callback, fun \\ nil) do
quote do
fn entry, acc(head, bool, tail) = original ->
if bool and unquote(callback).(entry) do
skip(original)
else
next_with_acc(unquote(fun), entry, head, false, tail)
end
end
end
end
defmacro filter(callback, fun \\ nil) do
quote do
fn(entry, acc) ->
if unquote(callback).(entry) do
next(unquote(fun), entry, acc)
else
skip(acc)
end
end
end
end
defmacro filter_map(filter, mapper, fun \\ nil) do
quote do
fn(entry, acc) ->
if unquote(filter).(entry) do
next(unquote(fun), unquote(mapper).(entry), acc)
else
skip(acc)
end
end
end
end
defmacro map(callback, fun \\ nil) do
quote do
fn(entry, acc) ->
next(unquote(fun), unquote(callback).(entry), acc)
end
end
end
defmacro map_every(nth, mapper, fun \\ nil) do
quote do
fn
entry, acc(head, curr, tail) when curr in [unquote(nth), :first] ->
next_with_acc(unquote(fun), unquote(mapper).(entry), head, 1, tail)
entry, acc(head, curr, tail) ->
next_with_acc(unquote(fun), entry, head, curr + 1, tail)
end
end
end
defmacro reject(callback, fun \\ nil) do
quote do
fn(entry, acc) ->
unless unquote(callback).(entry) do
next(unquote(fun), entry, acc)
else
skip(acc)
end
end
end
end
defmacro scan_2(callback, fun \\ nil) do
quote do
fn
entry, acc(head, :first, tail) ->
next_with_acc(unquote(fun), entry, head, {:ok, entry}, tail)
entry, acc(head, {:ok, acc}, tail) ->
value = unquote(callback).(entry, acc)
next_with_acc(unquote(fun), value, head, {:ok, value}, tail)
end
end
end
defmacro scan_3(callback, fun \\ nil) do
quote do
fn(entry, acc(head, acc, tail)) ->
value = unquote(callback).(entry, acc)
next_with_acc(unquote(fun), value, head, value, tail)
end
end
end
defmacro take(fun \\ nil) do
quote do
fn(entry, acc(head, curr, tail) = original) ->
case curr do
0 ->
{:halt, original}
1 ->
case next_with_acc(unquote(fun), entry, head, 0, tail) do
{:cont, acc} -> {:halt, acc}
reason -> reason
end
_ ->
next_with_acc(unquote(fun), entry, head, curr - 1, tail)
end
end
end
end
defmacro take_every(nth, fun \\ nil) do
quote do
fn
entry, acc(head, curr, tail) when curr in [unquote(nth), :first] ->
next_with_acc(unquote(fun), entry, head, 1, tail)
entry, acc(head, curr, tail) ->
skip(acc(head, curr + 1, tail))
end
end
end
defmacro take_while(callback, fun \\ nil) do
quote do
fn(entry, acc) ->
if unquote(callback).(entry) do
next(unquote(fun), entry, acc)
else
{:halt, acc}
end
end
end
end
defmacro uniq_by(callback, fun \\ nil) do
quote do
fn(entry, acc(head, prev, tail) = original) ->
value = unquote(callback).(entry)
if Map.has_key?(prev, value) do
skip(original)
else
next_with_acc(unquote(fun), entry, head, Map.put(prev, value, true), tail)
end
end
end
end
defmacro with_index(fun \\ nil) do
quote do
fn(entry, acc(head, counter, tail)) ->
next_with_acc(unquote(fun), {entry, counter}, head, counter + 1, tail)
end
end
end
end
|
lib/elixir/lib/stream/reducers.ex
| 0.62601
| 0.487429
|
reducers.ex
|
starcoder
|
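These private reducer macros back the public `Enum`/`Stream` API rather than being called directly; for example, `dedup/2` above is the shape behind `Stream.dedup_by/2`:

# Consecutive duplicates (by key) are skipped; non-consecutive ones are kept:
[1, 2, 1] =
  [1, 1, 2, 2, 1]
  |> Stream.dedup_by(fn x -> x end)
  |> Enum.to_list()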
defmodule Grizzly.ZWave.Commands.ScheduleEntryLockYearDaySet do
@moduledoc """
  This command sets or erases a schedule slot for an identified user who already has a valid user access code.
Params:
* `:set_action` - Indicates whether to erase or modify
* `:user_identifier` - The User Identifier is used to recognize the user identity.
* `:schedule_slot_id` - A value from 1 to Number of Slots Daily Repeating Supported
  * `:start_year` - A value from 0 to 99 that represents the 2-digit year within the century.
* `:start_month` - A value from 1 to 12 that represents the month in a year.
* `:start_day` - A value from 1 to 31 that represents the date of the month
* `:start_hour` - A value from 0 to 23 representing the starting hour of the time fence.
* `:start_minute` - A value from 0 to 59 representing the starting minute of the time fence.
  * `:stop_year` - A value from 0 to 99 that represents the 2-digit year within the century.
* `:stop_month` - A value from 1 to 12 that represents the month in a year.
* `:stop_day` - A value from 1 to 31 that represents the date of the month.
* `:stop_hour` - A value from 0 to 23 representing the stop hour of the time fence.
* `:stop_minute` - A value from 0 to 59 representing the stop minute of the time fence
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave.{Command, DecodeError}
alias Grizzly.ZWave.CommandClasses.ScheduleEntryLock
@type param ::
{:set_action, :erase | :modify}
| {:user_identifier, byte()}
| {:schedule_slot_id, byte()}
| {:start_year, 0..99}
| {:start_month, 1..12}
| {:start_day, 1..31}
| {:start_hour, 0..23}
| {:start_minute, 0..59}
| {:stop_year, 0..99}
| {:stop_month, 1..12}
| {:stop_day, 1..31}
| {:stop_hour, 0..23}
| {:stop_minute, 0..59}
@impl true
@spec new([param()]) :: {:ok, Command.t()}
def new(params) do
command = %Command{
name: :schedule_entry_lock_year_day_set,
command_byte: 0x06,
command_class: ScheduleEntryLock,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl true
@spec encode_params(Command.t()) :: binary()
def encode_params(command) do
set_action = Command.param!(command, :set_action)
user_identifier = Command.param!(command, :user_identifier)
schedule_slot_id = Command.param!(command, :schedule_slot_id)
start_year = Command.param!(command, :start_year)
start_month = Command.param!(command, :start_month)
start_day = Command.param!(command, :start_day)
start_hour = Command.param!(command, :start_hour)
start_minute = Command.param!(command, :start_minute)
stop_year = Command.param!(command, :stop_year)
stop_month = Command.param!(command, :stop_month)
stop_day = Command.param!(command, :stop_day)
stop_hour = Command.param!(command, :stop_hour)
stop_minute = Command.param!(command, :stop_minute)
action_byte = action_to_byte(set_action)
<<action_byte, user_identifier, schedule_slot_id, start_year, start_month, start_day,
start_hour, start_minute, stop_year, stop_month, stop_day, stop_hour, stop_minute>>
end
@impl true
@spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
def decode_params(
<<action_byte, user_identifier, schedule_slot_id, start_year, start_month, start_day,
start_hour, start_minute, stop_year, stop_month, stop_day, stop_hour, stop_minute>>
) do
{:ok,
[
set_action: byte_to_action(action_byte),
user_identifier: user_identifier,
schedule_slot_id: schedule_slot_id,
start_year: start_year,
start_month: start_month,
start_day: start_day,
start_hour: start_hour,
start_minute: start_minute,
stop_year: stop_year,
stop_month: stop_month,
stop_day: stop_day,
stop_hour: stop_hour,
stop_minute: stop_minute
]}
end
defp action_to_byte(:erase), do: 0x00
defp action_to_byte(:modify), do: 0x01
defp byte_to_action(0x00), do: :erase
defp byte_to_action(0x01), do: :modify
end
|
lib/grizzly/zwave/commands/schedule_entry_lock_year_day_set.ex
| 0.864982
| 0.660518
|
schedule_entry_lock_year_day_set.ex
|
starcoder
|
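An encode/decode round-trip sketch for the command above; the parameter values are arbitrary but in-range, and the keyword order matches what `decode_params/1` produces:

alias Grizzly.ZWave.Commands.ScheduleEntryLockYearDaySet

params = [
  set_action: :modify, user_identifier: 1, schedule_slot_id: 1,
  start_year: 21, start_month: 6, start_day: 1, start_hour: 8, start_minute: 0,
  stop_year: 21, stop_month: 6, stop_day: 1, stop_hour: 17, stop_minute: 30
]

{:ok, command} = ScheduleEntryLockYearDaySet.new(params)
binary = ScheduleEntryLockYearDaySet.encode_params(command)
{:ok, ^params} = ScheduleEntryLockYearDaySet.decode_params(binary)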
defmodule OMG.Eth.RootChain.Abi do
@moduledoc """
Functions that provide ethereum log decoding
"""
alias ExPlasma.Crypto
alias OMG.Eth.Encoding
alias OMG.Eth.RootChain.AbiEventSelector
alias OMG.Eth.RootChain.AbiFunctionSelector
alias OMG.Eth.RootChain.Fields
def decode_function(enriched_data, signature) do
"0x" <> data = enriched_data
<<method_id::binary-size(4), _::binary>> = Crypto.keccak_hash(signature)
method_id |> Encoding.to_hex() |> Kernel.<>(data) |> Encoding.from_hex() |> decode_function()
end
def decode_function(enriched_data) do
function_specs =
Enum.reduce(AbiFunctionSelector.module_info(:exports), [], fn
{:module_info, 0}, acc -> acc
{function, 0}, acc -> [apply(AbiFunctionSelector, function, []) | acc]
_, acc -> acc
end)
{function_spec, data} = ABI.find_and_decode(function_specs, enriched_data)
decode_function_call_result(function_spec, data)
end
def decode_log(log) do
event_specs =
Enum.reduce(AbiEventSelector.module_info(:exports), [], fn
{:module_info, 0}, acc -> acc
{function, 0}, acc -> [apply(AbiEventSelector, function, []) | acc]
_, acc -> acc
end)
topics =
Enum.map(log["topics"], fn
nil -> nil
topic -> Encoding.from_hex(topic)
end)
data = Encoding.from_hex(log["data"])
{event_spec, data} =
ABI.Event.find_and_decode(
event_specs,
Enum.at(topics, 0),
Enum.at(topics, 1),
Enum.at(topics, 2),
Enum.at(topics, 3),
data
)
data
|> Enum.into(%{}, fn {key, _type, _indexed, value} -> {key, value} end)
|> Fields.rename(event_spec)
|> common_parse_event(log)
end
def common_parse_event(
result,
%{"blockNumber" => eth_height, "transactionHash" => root_chain_txhash, "logIndex" => log_index} = event
) do
# NOTE: we're using `put_new` here, because `merge` would allow us to overwrite data fields in case of conflict
result
|> Map.put_new(:eth_height, Encoding.int_from_hex(eth_height))
|> Map.put_new(:root_chain_txhash, Encoding.from_hex(root_chain_txhash))
|> Map.put_new(:log_index, Encoding.int_from_hex(log_index))
# just copy `event_signature` over, if it's present (could use tidying up)
|> Map.put_new(:event_signature, event[:event_signature])
end
defp decode_function_call_result(function_spec, [values]) when is_tuple(values) do
function_spec.input_names
|> Enum.zip(Tuple.to_list(values))
|> Enum.into(%{})
|> Fields.rename(function_spec)
end
# workaround for https://github.com/omgnetwork/elixir-omg/issues/1632
defp decode_function_call_result(%{function: "startExit"} = function_spec, values) do
function_spec.input_names
|> Enum.zip(values)
|> Enum.into(%{})
|> Fields.rename(function_spec)
end
defp decode_function_call_result(function_spec, values) do
function_spec.input_names
|> Enum.zip(values)
|> Enum.into(%{})
end
end
|
apps/omg_eth/lib/omg_eth/root_chain/abi.ex
| 0.690559
| 0.415373
|
abi.ex
|
starcoder
|
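A hedged sketch of the log map shape `decode_log/1` expects, as delivered by an Ethereum JSON-RPC client; the topic and data values are placeholders, so the decoding call itself is left commented out:

log = %{
  "topics" => ["0x<event-signature-hash>", nil, nil, nil],
  "data" => "0x<abi-encoded-event-data>",
  "blockNumber" => "0x1",
  "transactionHash" => "0x<txhash>",
  "logIndex" => "0x0"
}

# OMG.Eth.RootChain.Abi.decode_log(log)
# => a map with the decoded event fields plus :eth_height, :root_chain_txhash
#    and :log_index merged in by common_parse_event/2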
defmodule OptionParser do
@moduledoc """
This module contains functions to parse command line arguments.
"""
@doc """
  Parses `argv` and returns a tuple with the parsed options, the remaining
  arguments, and a list of options that couldn't be parsed.
## Examples
iex> OptionParser.parse(["--debug"])
{ [debug: true], [], [] }
iex> OptionParser.parse(["--source", "lib"])
{ [source: "lib"], [], [] }
iex> OptionParser.parse(["--source-path", "lib", "test/enum_test.exs", "--verbose"])
{ [source_path: "lib", verbose: true], ["test/enum_test.exs"], [] }
Notice how Elixir automatically translates the "--source-path"
switch to the underscored atom `:source_path`, which better follows
Elixir conventions.
## Aliases
A set of aliases can be given as the second argument:
iex> OptionParser.parse(["-d"], aliases: [d: :debug])
{ [debug: true], [], [] }
## Switches
Extra information about switches can be given as arguments, too.
This is useful when a switch must behave as a boolean
  or if duplicated switches should be kept, overridden or accumulated.
The following types are supported:
* `:boolean` - Marks the given switch as a boolean. Boolean switches
never consume the following value unless it is
`true` or `false`;
* `:integer` - Parses the switch as an integer;
* `:float` - Parses the switch as a float;
If a switch can't be parsed, the option is returned in the invalid
options list (third element of the returned tuple).
The following extra options are supported:
* `:keep` - Keeps duplicated items in the list instead of overriding;
Examples:
iex> OptionParser.parse(["--unlock", "path/to/file"], switches: [unlock: :boolean])
{ [unlock: true], ["path/to/file"], [] }
iex> OptionParser.parse(["--unlock", "--limit", "0", "path/to/file"],
...> switches: [unlock: :boolean, limit: :integer])
{ [unlock: true, limit: 0], ["path/to/file"], [] }
iex> OptionParser.parse(["-limit", "3"], switches: [limit: :integer])
{ [limit: 3], [], [] }
iex> OptionParser.parse(["-limit", "yyz"], switches: [limit: :integer])
{ [], [], [limit: "yyz"] }
## Negation switches
Any switches starting with `--no-` are always considered to be
booleans and never parse the next value:
iex> OptionParser.parse(["--no-op", "path/to/file"])
{ [no_op: true], ["path/to/file"], [] }
In case the negated switch exists as a boolean, it sets the boolean to false:
iex> OptionParser.parse(["--no-op", "path/to/file"], switches: [op: :boolean])
{ [op: false], ["path/to/file"], [] }
"""
def parse(argv, opts // []) when is_list(argv) and is_list(opts) do
parse(argv, opts, true)
end
@doc """
Similar to `parse/2` but only parses the head of `argv`;
as soon as it finds a non-switch, it stops parsing.
See `parse/2` for more information.
## Example
iex> OptionParser.parse_head(["--source", "lib", "test/enum_test.exs", "--verbose"])
{ [source: "lib"], ["test/enum_test.exs", "--verbose"], [] }
iex> OptionParser.parse_head(["--verbose", "--source", "lib", "test/enum_test.exs", "--unlock"])
{ [verbose: true, source: "lib"], ["test/enum_test.exs", "--unlock"], [] }
"""
def parse_head(argv, opts // []) when is_list(argv) and is_list(opts) do
parse(argv, opts, false)
end
## Helpers
defp parse(argv, opts, bool) do
aliases = opts[:aliases] || []
switches = opts[:switches] || []
parse(argv, aliases, switches, bool)
end
defp parse(argv, aliases, switches, all) do
parse(argv, aliases, switches, [], [], [], all)
end
defp parse(["--"|_] = value, _aliases, _switches, dict, _args, invalid, _all) do
{ Enum.reverse(dict), value, Enum.reverse(invalid) }
end
defp parse(["-" <> option|t], aliases, switches, dict, args, invalid, all) do
{ option, kinds, value } = normalize_option(option, switches, aliases)
if nil?(value) do
{ value, t } =
if :boolean in kinds do
{ true, t }
else
value_from_tail(t)
end
end
{ dict, invalid } = store_option(dict, invalid, option, value, kinds)
parse(t, aliases, switches, dict, args, invalid, all)
end
defp parse([h|t], aliases, switches, dict, args, invalid, true) do
parse(t, aliases, switches, dict, [h|args], invalid, true)
end
defp parse([], _, _switches, dict, args, invalid, true) do
{ Enum.reverse(dict), Enum.reverse(args), Enum.reverse(invalid) }
end
defp parse(value, _, _switches, dict, _args, invalid, false) do
{ Enum.reverse(dict), value, Enum.reverse(invalid) }
end
defp value_from_tail(["-" <> _|_] = t), do: { true, t }
defp value_from_tail([h|t]), do: { h, t }
defp value_from_tail([]), do: { true, [] }
defp store_option(dict, invalid, option, value, kinds) do
{ invalid_option, value } =
cond do
:boolean in kinds ->
{ nil, value in [true, "true"] }
:integer in kinds ->
case Integer.parse(value) do
{ value, "" } -> { nil, value }
_ -> { option, value }
end
:float in kinds ->
case Float.parse(value) do
{ value, "" } -> { nil, value }
_ -> { option, value }
end
true ->
{ nil, value }
end
if invalid_option do
{ dict, [{ option, value }|invalid] }
else
{ do_store_option(dict, option, value, kinds), invalid }
end
end
defp do_store_option(dict, option, value, kinds) do
cond do
:keep in kinds ->
[{ option, value }|dict]
true ->
[{ option, value }|Keyword.delete(dict, option)]
end
end
defp normalize_option(<<?-, option :: binary>>, switches, aliases) do
normalize_option(option, switches, aliases)
end
defp normalize_option(option, switches, aliases) do
{ option, value } = split_option(option)
if non_neg = get_non_negated(option, aliases) do
kinds = List.wrap(switches[non_neg])
if :boolean in kinds do
{ non_neg, kinds, false }
else
{ get_aliased(option, aliases), [:boolean], true }
end
else
atom = get_aliased(option, aliases)
{ atom, List.wrap(switches[atom]), value }
end
end
defp split_option(option) do
case :binary.split(option, "=") do
[h] -> { h, nil }
[h, t] -> { h, t }
end
end
defp to_underscore(option) do
bc <<c>> inbits option, do: << if(c == ?-, do: ?_, else: c) >>
end
defp get_aliased(option, aliases) do
atom = option |> to_underscore |> binary_to_atom
aliases[atom] || atom
end
defp get_non_negated("no-" <> rest, aliases), do: get_aliased(rest, aliases)
defp get_non_negated(_, _), do: nil
end
|
lib/elixir/lib/option_parser.ex
| 0.851922
| 0.495239
|
option_parser.ex
|
starcoder
|
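A sketch of the `:keep` modifier handled by `do_store_option/4` above: duplicated switches accumulate instead of overriding.

OptionParser.parse(["--require", "a", "--require", "b"],
                   switches: [require: :keep])
# => { [require: "a", require: "b"], [], [] }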
defmodule HomeBot.DataStore.EnergyPostgresStore do
@moduledoc "Data store for energy data"
import HomeBot.DataStore.PostgresStore
def get_latest_measurement do
query("SELECT * FROM energy ORDER BY time DESC limit 1")
|> List.first()
end
def get_measurements_since(datetime) do
query("SELECT * FROM energy WHERE time > $1 ORDER BY time ASC", [datetime])
end
def get_electricity_usage(minutes) do
query(
"SELECT time, current_energy_usage as usage FROM energy WHERE time >= (NOW() - interval '#{minutes} minutes')"
)
end
@spec get_energy_usage(NaiveDateTime.t(), NaiveDateTime.t(), integer(), String.t()) ::
list(map())
def get_energy_usage(start_time, end_time, group_quantity, group_unit) do
query = """
SELECT time_bucket('#{group_quantity} #{group_unit}'::interval, time) AS bucket,
MIN(meter_low_tariff) AS min_meter_low_tariff,
MAX(meter_low_tariff) AS meter_low_tariff,
MIN(meter_normal_tariff) AS min_meter_normal_tariff,
MAX(meter_normal_tariff) AS meter_normal_tariff,
MIN(meter_supplied_low_tariff) AS min_meter_supplied_low_tariff,
MAX(meter_supplied_low_tariff) AS meter_supplied_low_tariff,
MIN(meter_supplied_normal_tariff) AS min_meter_supplied_normal_tariff,
MAX(meter_supplied_normal_tariff) AS meter_supplied_normal_tariff,
MIN(meter_total_tariff) AS min_meter_total_tariff,
MAX(meter_total_tariff) AS meter_total_tariff,
MIN(meter_supplied_total_tariff) AS min_meter_supplied_total_tariff,
MAX(meter_supplied_total_tariff) AS meter_supplied_total_tariff,
MIN(current_gas_usage) AS min_gas_meter,
MAX(current_gas_usage) as gas_meter
FROM energy
WHERE time >= $1 AND time < $2
GROUP BY bucket
ORDER BY bucket;
"""
result = query(query, [start_time, end_time])
first = List.first(result)
start_values = %{
meter_low_tariff: first[:min_meter_low_tariff],
meter_normal_tariff: first[:min_meter_normal_tariff],
meter_supplied_low_tariff: first[:min_meter_supplied_low_tariff],
meter_supplied_normal_tariff: first[:min_meter_supplied_normal_tariff],
meter_total_tariff: first[:min_meter_total_tariff],
meter_supplied_total_tariff: first[:min_meter_supplied_total_tariff],
gas_meter: first[:min_gas_meter]
}
{result, _} =
Enum.map_reduce(result, start_values, fn elem, previous ->
{get_electricity_increase(previous, elem), elem}
end)
result
end
def get_electricity_increase(previous, current) do
%{
time: current[:bucket],
usage_low_tariff:
Decimal.sub(
current[:meter_low_tariff],
previous[:meter_low_tariff]
)
|> Decimal.to_float(),
usage_normal_tariff:
Decimal.sub(
current[:meter_normal_tariff],
previous[:meter_normal_tariff]
)
|> Decimal.to_float(),
supplied_low_tariff:
Decimal.sub(
current[:meter_supplied_low_tariff],
previous[:meter_supplied_low_tariff]
)
|> Decimal.to_float(),
supplied_normal_tariff:
Decimal.sub(
current[:meter_supplied_normal_tariff],
previous[:meter_supplied_normal_tariff]
)
|> Decimal.to_float(),
supplied_total_tariff:
Decimal.sub(
current[:meter_supplied_total_tariff],
previous[:meter_supplied_total_tariff]
)
|> Decimal.to_float(),
usage_total_tariff:
Decimal.sub(
current[:meter_total_tariff],
previous[:meter_total_tariff]
)
|> Decimal.to_float(),
usage_gas_meter:
Decimal.sub(
current[:gas_meter],
previous[:gas_meter]
)
|> Decimal.to_float()
}
end
end
|
lib/home_bot/data_store/energy_postgres_store.ex
| 0.779616
| 0.457924
|
energy_postgres_store.ex
|
starcoder
|
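A hedged call sketch for the bucketed query above; it assumes a configured TimescaleDB connection behind `HomeBot.DataStore.PostgresStore.query/2` (the `time_bucket` call in the SQL implies TimescaleDB):

HomeBot.DataStore.EnergyPostgresStore.get_energy_usage(
  ~N[2021-01-01 00:00:00],
  ~N[2021-01-02 00:00:00],
  1,
  "hour"
)
# => one map per hourly bucket, e.g. %{time: ..., usage_low_tariff: ..., usage_gas_meter: ...},
#    with each usage value computed as the delta against the previous bucket's meter reading.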
defmodule WebDriver.Keys do
@moduledoc """
This provides symbols to represent various non-printable keystrokes that
can be sent to a web browser.
The codes are defined in: https://code.google.com/p/selenium/wiki/JsonWireProtocol#/session/:sessionId/element/:id/value
"""
@non_text_keys [
{ :key_null, "\x{e000}" },
{ :key_cancel, "\x{e001}"},
{ :key_help, "\x{e002}"},
{ :key_back_space,"\x{e003}"},
{ :key_tab, "\x{e004}"},
{ :key_clear, "\x{e005}"},
{ :key_return, "\x{e006}"},
{ :key_enter, "\x{e007}"},
{ :key_shift, "\x{e008}"},
{ :key_control, "\x{e009}"},
{ :key_alt, "\x{e00a}"},
{ :key_pause, "\x{e00b}"},
{ :key_escape, "\x{e00c}"},
{ :key_space, "\x{e00d}"},
{ :key_page_up, "\x{e00e}"},
{ :key_page_down, "\x{e00f}"},
{ :key_end, "\x{e010}"},
{ :key_home, "\x{e011}"},
{ :key_left, "\x{e012}"},
{ :key_up, "\x{e013}"},
{ :key_right, "\x{e014}"},
{ :key_down, "\x{e015}"},
{ :key_insert, "\x{e016}"},
{ :key_delete, "\x{e017}"},
{ :key_semicolon, "\x{e018}"},
{ :key_equals, "\x{e019}"},
{ :key_numpad_0, "\x{e01a}"},
{ :key_numpad_1, "\x{e01b}"},
{ :key_numpad_2, "\x{e01c}"},
{ :key_numpad_3, "\x{e01d}"},
{ :key_numpad_4, "\x{e01e}"},
{ :key_numpad_5, "\x{e01f}"},
{ :key_numpad_6, "\x{e020}"},
{ :key_numpad_7, "\x{e021}"},
{ :key_numpad_8, "\x{e022}"},
{ :key_numpad_9, "\x{e023}"},
{ :key_multiply, "\x{e024}"},
{ :key_add, "\x{e025}"},
{ :key_separator, "\x{e026}"},
{ :key_subtract, "\x{e027}"},
{ :key_decimal, "\x{e028}"},
{ :key_divide, "\x{e029}"},
{ :key_f1, "\x{e031}"},
{ :key_f2, "\x{e032}"},
{ :key_f3, "\x{e033}"},
{ :key_f4, "\x{e034}"},
{ :key_f5, "\x{e035}"},
{ :key_f6, "\x{e036}"},
{ :key_f7, "\x{e037}"},
{ :key_f8, "\x{e038}"},
{ :key_f9, "\x{e039}"},
{ :key_f10, "\x{e03a}"},
{ :key_f11, "\x{e03b}"},
{ :key_f12, "\x{e03c}"},
{ :key_meta, "\x{e03d}"}
]
defp val({:ok, value}) do
value
end
defp val(:error) do
{:error, :invalid_key_code}
end
@doc """
Returns the Unicode code used to simulate non-text key presses.
See: https://code.google.com/p/selenium/wiki/JsonWireProtocol#/session/:sessionId/element/:id/value
Key codes that are available:
```Elixir
{ :key_null, "\x{e000}"},
{ :key_cancel, "\x{e001}"},
{ :key_help, "\x{e002}"},
{ :key_back_space,"\x{e003}"},
{ :key_tab, "\x{e004}"},
{ :key_clear, "\x{e005}"},
{ :key_return, "\x{e006}"},
{ :key_enter, "\x{e007}"},
{ :key_shift, "\x{e008}"},
{ :key_control, "\x{e009}"},
{ :key_alt, "\x{e00a}"},
{ :key_pause, "\x{e00b}"},
{ :key_escape, "\x{e00c}"},
{ :key_space, "\x{e00d}"},
{ :key_page_up, "\x{e00e}"},
{ :key_page_down, "\x{e00f}"},
{ :key_end, "\x{e010}"},
{ :key_home, "\x{e011}"},
{ :key_left, "\x{e012}"},
{ :key_up, "\x{e013}"},
{ :key_right, "\x{e014}"},
{ :key_down, "\x{e015}"},
{ :key_insert, "\x{e016}"},
{ :key_delete, "\x{e017}"},
{ :key_semicolon, "\x{e018}"},
{ :key_equals, "\x{e019}"},
{ :key_numpad_0, "\x{e01a}"},
{ :key_numpad_1, "\x{e01b}"},
{ :key_numpad_2, "\x{e01c}"},
{ :key_numpad_3, "\x{e01d}"},
{ :key_numpad_4, "\x{e01e}"},
{ :key_numpad_5, "\x{e01f}"},
{ :key_numpad_6, "\x{e020}"},
{ :key_numpad_7, "\x{e021}"},
{ :key_numpad_8, "\x{e022}"},
{ :key_numpad_9, "\x{e023}"},
{ :key_multiply, "\x{e024}"},
{ :key_add, "\x{e025}"},
{ :key_separator, "\x{e026}"},
{ :key_subtract, "\x{e027}"},
{ :key_decimal, "\x{e028}"},
{ :key_divide, "\x{e029}"},
{ :key_f1, "\x{e031}"},
{ :key_f2, "\x{e032}"},
{ :key_f3, "\x{e033}"},
{ :key_f4, "\x{e034}"},
{ :key_f5, "\x{e035}"},
{ :key_f6, "\x{e036}"},
{ :key_f7, "\x{e037}"},
{ :key_f8, "\x{e038}"},
{ :key_f9, "\x{e039}"},
{ :key_f10, "\x{e03a}"},
{ :key_f11, "\x{e03b}"},
{ :key_f12, "\x{e03c}"},
{ :key_meta, "\x{e03d}"}
```
"""
def key(key_code) do
Keyword.fetch(@non_text_keys, key_code)
|> val
end
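# Usage:
#
#   WebDriver.Keys.key(:key_enter)  #=> "\x{e007}"
#   WebDriver.Keys.key(:bogus)      #=> {:error, :invalid_key_code}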
end
|
lib/webdriver/keys.ex
| 0.754373
| 0.745167
|
keys.ex
|
starcoder
|
defmodule Exq.Redis.JobStat do
@moduledoc """
The JobStat module encapsulates storing system-wide stats on top of Redis.
It aims to be compatible with the Sidekiq stats format.
"""
require Logger
alias Exq.Support.{Binary, Process, Job, Time, Node}
alias Exq.Redis.{Connection, JobQueue}
def record_processed_commands(namespace, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:processed")],
["INCR", JobQueue.full_key(namespace, "stat:processed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:processed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:processed:#{date}")]
]
end
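# Illustrative shape of the generated pipeline, assuming JobQueue.full_key/2
# joins namespace and key with a colon (the exact time/date formats come from
# Exq.Support.Time):
#
#   record_processed_commands("exq", job)
#   #=> [["INCR", "exq:stat:processed"],
#   #    ["INCR", "exq:stat:processed_rt:<time>"],
#   #    ["EXPIRE", "exq:stat:processed_rt:<time>", 120],
#   #    ["INCR", "exq:stat:processed:<date>"]]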
def record_processed(redis, namespace, job, current_date \\ DateTime.utc_now()) do
instr = record_processed_commands(namespace, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def record_failure_commands(namespace, _error, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:failed")],
["INCR", JobQueue.full_key(namespace, "stat:failed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:failed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:failed:#{date}")]
]
end
def record_failure(redis, namespace, error, job, current_date \\ DateTime.utc_now()) do
instr = record_failure_commands(namespace, error, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def add_process_commands(namespace, process_info, serialized_process \\ nil) do
serialized = serialized_process || Exq.Support.Process.encode(process_info)
[["HSET", workers_key(namespace, process_info.host), process_info.pid, serialized]]
end
def add_process(redis, namespace, process_info, serialized_process \\ nil) do
instr = add_process_commands(namespace, process_info, serialized_process)
Connection.qp!(redis, instr)
:ok
end
def remove_process_commands(namespace, process_info) do
[["HDEL", workers_key(namespace, process_info.host), process_info.pid]]
end
def remove_process(redis, namespace, process_info) do
instr = remove_process_commands(namespace, process_info)
Connection.qp!(redis, instr)
:ok
end
def cleanup_processes(redis, namespace, host) do
Connection.del!(redis, workers_key(namespace, host))
:ok
end
def node_ping(redis, namespace, node) do
key = node_info_key(namespace, node.identity)
case Connection.qp(
redis,
[
["MULTI"],
["SADD", nodes_key(namespace), node.identity],
[
"HMSET",
key,
"info",
Node.encode(node),
"busy",
node.busy,
"beat",
Time.unix_seconds(),
"quiet",
node.quiet
],
["EXPIRE", key, 60],
["RPOP", "#{key}-signals"],
["EXEC"]
]
) do
{:ok, ["OK", "QUEUED", "QUEUED", "QUEUED", "QUEUED", [_, "OK", 1, signal]]} ->
signal
error ->
Logger.error("Failed to send node stats. Unexpected error from redis: #{inspect(error)}")
nil
end
end
def node_signal(redis, namespace, node_id, signal_name) do
key = node_info_key(namespace, node_id)
signal_key = "#{key}-signals"
case Connection.qp(redis, [
["MULTI"],
["LPUSH", signal_key, signal_name],
["EXPIRE", signal_key, 60],
["EXEC"]
]) do
{:ok, ["OK", "QUEUED", "QUEUED", [_, 1]]} -> :ok
error -> error
end
end
def node_ids(redis, namespace) do
Connection.smembers!(redis, nodes_key(namespace))
end
def nodes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HGET", node_info_key(namespace, node_id), "info"] end)
if Enum.empty?(commands) do
[]
else
Connection.qp!(redis, commands)
|> Enum.flat_map(fn result ->
if result && result != "" do
[Node.decode(result)]
else
[]
end
end)
end
end
def prune_dead_nodes(redis, namespace) do
node_ids = node_ids(redis, namespace)
commands =
node_ids
|> Enum.map(fn node_id -> ["HEXISTS", node_info_key(namespace, node_id), "info"] end)
if Enum.empty?(commands) do
[]
else
dead_node_ids =
Connection.qp!(redis, commands)
|> Enum.zip(node_ids)
|> Enum.flat_map(fn {exists, node_id} ->
if exists == 0 do
[node_id]
else
[]
end
end)
if !Enum.empty?(dead_node_ids) do
commands = [
["SREM", nodes_key(namespace)] ++ dead_node_ids,
["DEL"] ++ Enum.map(node_ids, &workers_key(namespace, &1))
]
Connection.qp(redis, commands)
end
end
end
def busy(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HGET", node_info_key(namespace, node_id), "busy"] end)
if Enum.empty?(commands) do
0
else
Connection.qp!(redis, commands)
|> Enum.reduce(0, fn count, sum -> sum + decode_integer(count) end)
end
end
def processes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["HVALS", workers_key(namespace, node_id)] end)
if Enum.empty?(commands) do
[]
else
Connection.qp!(redis, commands)
|> List.flatten()
|> Enum.map(&Process.decode/1)
end
end
def find_failed(redis, namespace, jid) do
redis
|> Connection.zrange!(JobQueue.full_key(namespace, "dead"), 0, -1)
|> JobQueue.search_jobs(jid)
end
def find_failed(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "dead"), score, jid, options)
end
def find_retry(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "retry"), score, jid, options)
end
def find_scheduled(redis, namespace, score, jid, options) do
find_by_score_and_jid(redis, JobQueue.full_key(namespace, "schedule"), score, jid, options)
end
def remove_queue(redis, namespace, queue) do
Connection.qp(redis, [
["SREM", JobQueue.full_key(namespace, "queues"), queue],
["DEL", JobQueue.queue_key(namespace, queue)]
])
end
def remove_failed(redis, namespace, jid) do
{:ok, failure} = find_failed(redis, namespace, jid)
Connection.qp(redis, [
["DECR", JobQueue.full_key(namespace, "stat:failed")],
["ZREM", JobQueue.full_key(namespace, "dead"), Job.encode(failure)]
])
end
def clear_failed(redis, namespace) do
Connection.qp(redis, [
["SET", JobQueue.full_key(namespace, "stat:failed"), 0],
["DEL", JobQueue.full_key(namespace, "dead")]
])
end
def clear_processes(redis, namespace) do
commands =
node_ids(redis, namespace)
|> Enum.map(fn node_id -> ["DEL", workers_key(namespace, node_id)] end)
if Enum.empty?(commands) do
0
else
Connection.qp!(redis, commands)
end
end
def realtime_stats(redis, namespace) do
failure_keys = realtime_stats_scanner(redis, JobQueue.full_key(namespace, "stat:failed_rt:*"))
success_keys =
realtime_stats_scanner(redis, JobQueue.full_key(namespace, "stat:processed_rt:*"))
formatter = realtime_stats_formatter(redis, namespace)
failures = formatter.(failure_keys, "stat:failed_rt:")
successes = formatter.(success_keys, "stat:processed_rt:")
{:ok, failures, successes}
end
defp realtime_stats_scanner(redis, pattern) do
{:ok, [[cursor, result]]} =
Connection.qp(redis, [["SCAN", 0, "MATCH", pattern, "COUNT", 1_000]])
realtime_stats_scan_keys(redis, pattern, cursor, result)
end
defp realtime_stats_scan_keys(_redis, _pattern, "0", accumulator) do
accumulator
end
defp realtime_stats_scan_keys(redis, pattern, cursor, accumulator) do
{:ok, [[new_cursor, result]]} =
Connection.qp(redis, [["SCAN", cursor, "MATCH", pattern, "COUNT", 1_000]])
realtime_stats_scan_keys(redis, pattern, new_cursor, accumulator ++ result)
end
defp realtime_stats_formatter(redis, namespace) do
fn keys, ns ->
if Enum.empty?(keys) do
[]
else
{:ok, counts} = Connection.qp(redis, Enum.map(keys, &["GET", &1]))
Enum.map(keys, &Binary.take_prefix(&1, JobQueue.full_key(namespace, ns)))
|> Enum.zip(counts)
end
end
end
def get_count(redis, namespace, key) do
Connection.get!(redis, JobQueue.full_key(namespace, "stat:#{key}"))
|> decode_integer()
end
def get_counts(redis, namespace, keys) do
{:ok, results} =
Connection.q(redis, ["MGET" | Enum.map(keys, &JobQueue.full_key(namespace, "stat:#{&1}"))])
Enum.map(results, &decode_integer/1)
end
def decode_integer(:undefined), do: 0
def decode_integer(nil), do: 0
def decode_integer(count) when is_integer(count), do: count
def decode_integer(count) when is_binary(count) do
{count, _} = Integer.parse(count)
count
end
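# decode_integer/1 normalizes Redis replies:
#
#   decode_integer(nil)   #=> 0
#   decode_integer("42")  #=> 42
#   decode_integer(7)     #=> 7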
defp find_by_score_and_jid(redis, zset, score, jid, options) do
redis
|> Connection.zrangebyscore!(zset, score, score)
|> JobQueue.search_jobs(jid, !Keyword.get(options, :raw, false))
end
defp workers_key(namespace, node_id) do
JobQueue.full_key(namespace, "#{node_id}:workers")
end
defp nodes_key(namespace) do
"#{namespace}:processes"
end
defp node_info_key(namespace, node_id) do
"#{namespace}:#{node_id}"
end
end
|
lib/exq/redis/job_stat.ex
| 0.592902
| 0.475666
|
job_stat.ex
|
starcoder
|
defmodule Personnummer do
@moduledoc """
A module that makes working with Swedish social security numbers (personnummer) easier.
"""
# A Swedish coordination number (samordningsnummer) adds 60 to the day of
# birth, so the date part is valid if either interpretation is a real date.
defp valid_date_part?(year, month, day) do
{social_security_success, _} = Date.new(year, month, day)
{coordination_success, _} = Date.new(year, month, day - 60)
social_security_success == :ok || coordination_success == :ok
end
defp fix_seperator(candidate) do
if String.length(candidate) != 1 do
"-"
else
candidate
end
end
@doc """
Converts a number to a gender. This is the last digit in a personnummer before the Luhn check digit.
## Examples
iex> Personnummer.number_to_gender(4)
:female
iex> Personnummer.number_to_gender(5)
:male
"""
def number_to_gender(number) when is_integer(number) do
if rem(number, 2) == 1, do: :male, else: :female
end
defp validate_map(candidate) when is_map(candidate) do
year = candidate.year
|> Integer.to_string()
|> String.pad_leading(2, "0")
month = candidate.month
|> Integer.to_string()
|> String.pad_leading(2, "0")
day = candidate.day
|> Integer.to_string()
|> String.pad_leading(2, "0")
number = candidate.number
|> Integer.to_string()
|> String.pad_leading(3, "0")
check = candidate.check
|> Integer.to_string()
valid_luhn = year <> month <> day <> number <> check
|> Luhn.valid?()
cond do
!valid_luhn ->
:invalid_luhn
!valid_date_part?(candidate.year, candidate.month, candidate.day) ->
:invalid_date
true ->
:ok
end
end
defp regex_match(value) do
matches = Regex.run(~r/^(\d{2}){0,1}(\d{2})(\d{2})(\d{2})([-|+]{0,1})?(\d{3})(\d{1})$/, value)
if matches == nil or length(matches) < 7 do
{:error, nil}
else
{:ok, matches}
end
end
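# For a valid personnummer the match list holds the full match plus the seven
# capture groups, e.g.:
#
#   regex_match("19130401+2931")
#   #=> {:ok, ["19130401+2931", "19", "13", "04", "01", "+", "293", "1"]}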
defp to_map(list) when not is_nil(list) do
# The century is not part of the standard and is therefore not part of the map
%{
year: Enum.at(list, 2)
|> Integer.parse()
|> elem(0),
month: Enum.at(list, 3)
|> Integer.parse()
|> elem(0),
day: Enum.at(list, 4)
|> Integer.parse()
|> elem(0),
seperator: Enum.at(list, 5)
|> fix_seperator(),
number: Enum.at(list, 6)
|> Integer.parse()
|> elem(0),
check: Enum.at(list, 7)
|> Integer.parse()
|> elem(0),
gender: Enum.at(list, 6)
|> Integer.parse()
|> elem(0)
|> number_to_gender()
}
end
@doc """
Validate Swedish social security number.
## Examples
iex> Personnummer.valid?(6403273813)
true
iex> Personnummer.valid?("19130401+2931")
true
"""
def valid?(value) do
{success, _} = parse(value)
success === :ok
end
def parse(value) when not is_integer(value) and not is_binary(value) do
{:error, :invalid_type}
end
def parse(value) when is_integer(value) do
value
|> Integer.to_string()
|> parse()
end
@doc """
Parses a Swedish social security number.
## Examples
iex> Personnummer.parse("510818-9167")
{:ok, %{ check: 7, day: 18, gender: :female, month: 8, number: 916, seperator: "-", year: 51 }}
"""
def parse(value) when is_binary(value) do
{match_result, matches} = regex_match(value)
if match_result == :error do
{:error, :invalid_format}
else
map = to_map(matches)
validation_result = validate_map(map)
if validation_result == :ok do
{:ok, map}
else
{:error, validation_result}
end
end
end
end
|
lib/personnummer.ex
| 0.646906
| 0.493836
|
personnummer.ex
|
starcoder
|
defmodule Cldr.Territory do
@moduledoc """
Supports the CLDR Territories definitions which provide the localization of many
territories.
"""
alias Cldr.LanguageTag
@type as_options :: [as: :atom | :binary | :charlist]
@type atom_binary_charlist :: atom() | binary() | charlist()
@type atom_binary_tag :: atom() | binary() | LanguageTag.t()
@type atom_tag :: atom() | LanguageTag.t()
@type binary_tag :: binary() | LanguageTag.t()
@type error :: {module, binary()}
@type styles :: :short | :standard | :variant
@type tag :: LanguageTag.t()
@type options :: [{:locale, binary_tag()} | {:style, styles()}]
@styles [:short, :standard, :variant]
@territory_containment Cldr.Config.territory_containers()
@territory_info Cldr.Config.territories()
@doc """
Returns a list of available styles.
## Example
iex> Cldr.Territory.available_styles()
[:short, :standard, :variant]
"""
@spec available_styles() :: [styles()]
def available_styles(), do: @styles
@doc """
Returns the available territories for a given locale.
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
## Example
=> Cldr.Territory.available_territories(TestBackend.Cldr)
[:"001", :"002", :"003", :"005", :"009", :"011", :"013", :"014", :"015", :"017",
:"018", :"019", :"021", :"029", :"030", :"034", :"035", :"039", :"053", :"054",
:"057", :"061", :"142", :"143", :"145", :"150", :"151", :"154", :"155", :"202",
:"419", :AC, :AD, :AE, :AF, :AG, :AI, :AL, :AM, :AO, :AQ, :AR, :AS, :AT, :AU,
:AW, :AX, :AZ, :BA, :BB, ...]
"""
@spec available_territories(Cldr.backend()) :: [atom()]
def available_territories(backend) do
module = Module.concat(backend, Territory)
module.available_territories()
end
@doc """
Returns the available territory subdivisions for a given locale.
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
## Example
=> Cldr.Territory.available_subdivisions(TestBackend.Cldr)
["ad02", "ad03", "ad04", "ad05", "ad06", "ad07", "ad08", ...]
"""
@spec available_subdivisions(Cldr.backend()) :: [atom()]
def available_subdivisions(backend) do
module = Module.concat(backend, Territory)
module.available_subdivisions()
end
@doc """
Returns a map of all known territories in a given locale.
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
## Example
=> Cldr.Territory.known_territories(TestBackend.Cldr)
%{SN: %{standard: "Senegal"}, "061": %{standard: "Polynesia"},
BH: %{standard: "Bahrain"}, TM: %{standard: "Turkmenistan"},
"009": %{standard: "Oceania"}, CW: %{standard: "Curaçao"},
FR: %{standard: "France"}, TN: %{standard: "Tunisia"},
FI: %{standard: "Finland"}, BF: %{standard: "Burkina Faso"},
"155": %{standard: "Western Europe"}, GL: %{standard: "Greenland"},
VI: %{standard: "U.S. Virgin Islands"}, ZW: %{standard: "Zimbabwe"},
AR: %{standard: "Argentina"}, SG: %{standard: "Singapore"},
SZ: %{standard: "Swaziland"}, ID: %{standard: "Indonesia"},
NR: %{standard: "Nauru"}, RW: %{standard: "Rwanda"},
TR: %{standard: "Turkey"}, IS: %{standard: "Iceland"},
ME: %{standard: "Montenegro"}, AW: %{standard: "Aruba"},
PY: %{standard: "Paraguay"}, "145": %{standard: "Western Asia"},
CG: %{standard: "Congo - Brazzaville", variant: "Congo (Republic)"},
LT: %{standard: "Lithuania"}, SA: %{standard: "Saudi Arabia"},
MZ: %{standard: "Mozambique"}, NU: %{standard: "Niue"},
NG: %{standard: "Nigeria"}, CK: %{standard: "Cook Islands"},
ZM: %{standard: "Zambia"}, LK: %{standard: "Sri Lanka"},
UY: %{standard: "Uruguay"}, YE: %{standard: "Yemen"},
"011": %{standard: "Western Africa"},
CC: %{standard: "Cocos (Keeling) Islands"}, BY: %{standard: "Belarus"},
IL: %{standard: "Israel"}, KY: %{standard: "Cayman Islands"},
GN: %{standard: "Guinea"}, VN: %{standard: "Vietnam"},
PE: %{standard: "Peru"}, HU: %{standard: "Hungary"},
HN: %{standard: "Honduras"}, GI: %{standard: "Gibraltar"},
"142": %{standard: "Asia"}, "029": %{...}, ...}
"""
@spec known_territories(Cldr.backend()) :: map() | {:error, error()}
def known_territories(backend) do
module = Module.concat(backend, Territory)
module.known_territories()
end
@doc """
Returns a map of all known territory subdivisions in a given locale.
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
## Example
=> Cldr.Territory.known_subdivisions(TestBackend.Cldr)
%{
"ad02" => "Canillo",
"ad03" => "Encamp",
"ad04" => "La Massana",
"ad05" => "Ordino",
"ad06" => "<NAME>",
"ad07" => "<NAME>",
...
"""
@spec known_subdivisions(Cldr.backend()) :: map() | {:error, error()}
def known_subdivisions(backend) do
module = Module.concat(backend, Territory)
module.known_subdivisions()
end
@doc """
Returns a list of subdivisions of a given territory.
## Example
=> Cldr.Territory.known_territory_subdivisions(:GB, TestBackend.Cldr)
{:ok, ["gbabc", "gbabd", "gbabe", "gbagb", "gbagy", "gband", "gbann",
"gbans", "gbbas", "gbbbd", "gbbdf", "gbbdg", "gbben", "gbbex", "gbbfs",
"gbbge", "gbbgw", "gbbir", "gbbkm", "gbbmh", "gbbne", "gbbnh", "gbbns",
"gbbol", "gbbpl", "gbbrc", "gbbrd", "gbbry", "gbbst", "gbbur", "gbcam",
"gbcay", "gbcbf", "gbccg", "gbcgn", "gbche", "gbchw", "gbcld", "gbclk",
"gbcma", "gbcmd", "gbcmn", "gbcon", "gbcov", "gbcrf", "gbcry", "gbcwy",
"gbdal", "gbdby", "gbden", ...]}
"""
@spec known_territory_subdivisions(atom_binary_tag(), Cldr.backend()) ::
{:ok, [binary()]} | {:error, error()}
def known_territory_subdivisions(territory_code, backend) do
module = Module.concat(backend, Territory)
module.known_territory_subdivisions(territory_code)
end
@doc """
Localized string for the given territory code.
Returns `{:ok, String.t}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
* `style` is one of those returned by `Cldr.Territory.available_styles/0`.
The current styles are `:short`, `:standard` and `:variant`.
The default is `:standard`
## Example
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr)
{:ok, "United Kingdom"}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [style: :short])
{:ok, "UK"}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [style: :ZZZ])
{:error, {Cldr.UnknownStyleError, "The style :ZZZ is unknown"}}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [style: "ZZZ"])
{:error, {Cldr.UnknownStyleError, "The style \\"ZZZ\\" is unknown"}}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [locale: "pt"])
{:ok, "Reino Unido"}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [locale: :zzz])
{:error, {Cldr.UnknownLocaleError, "The locale :zzz is not known."}}
iex> Cldr.Territory.from_territory_code(:GB, TestBackend.Cldr, [locale: "zzz"])
{:error, {Cldr.UnknownLocaleError, "The locale \\"zzz\\" is not known."}}
"""
@spec from_territory_code(atom_binary_tag(), Cldr.backend(), options()) :: {:ok, binary()} | {:error, error()}
def from_territory_code(territory_code, backend, options \\ [locale: Cldr.get_locale(), style: :standard]) do
module = Module.concat(backend, Territory)
module.from_territory_code(territory_code, options)
end
@doc """
Localized string for the given subdivision code.
Returns `{:ok, String.t}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/1`
## Example
iex> #{inspect __MODULE__}.from_subdivision_code("gbcma", TestBackend.Cldr, locale: "en")
{:ok, "Cumbria"}
iex> #{inspect __MODULE__}.from_subdivision_code("gbcma", TestBackend.Cldr, locale: "pl")
{:ok, "Kumbria"}
iex> #{inspect __MODULE__}.from_subdivision_code("gbcma", TestBackend.Cldr, locale: "bs")
{:error, {Cldr.UnknownSubdivisionError, "The locale \\"bs\\" has no translation for \\"gbcma\\"."}}
iex> #{inspect __MODULE__}.from_subdivision_code("invalid", TestBackend.Cldr, locale: "en")
{:error, {Cldr.UnknownTerritoryError, "The territory \\"invalid\\" is unknown"}}
iex> #{inspect __MODULE__}.from_subdivision_code("gbcma", TestBackend.Cldr, [locale: :zzz])
{:error, {Cldr.UnknownLocaleError, "The locale :zzz is not known."}}
iex> #{inspect __MODULE__}.from_subdivision_code("gbcma", TestBackend.Cldr, [locale: "zzz"])
{:error, {Cldr.UnknownLocaleError, "The locale \\"zzz\\" is not known."}}
"""
@spec from_subdivision_code(binary(), Cldr.backend(), [locale: binary_tag()]) ::
{:ok, binary()} | {:error, error()}
def from_subdivision_code(
subdivision_code,
backend,
options \\ [locale: Cldr.get_locale()]
) do
module = Module.concat(backend, Territory)
module.from_subdivision_code(subdivision_code, options)
end
@doc """
The same as `from_territory_code/2`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.from_territory_code!(:GB, TestBackend.Cldr)
"United Kingdom"
iex> Cldr.Territory.from_territory_code!(:GB, TestBackend.Cldr, [style: :short])
"UK"
iex> Cldr.Territory.from_territory_code!(:GB, TestBackend.Cldr, [locale: "pt"])
"Reino Unido"
"""
@spec from_territory_code!(atom_binary_tag(), Cldr.backend(), options()) :: binary() | no_return()
def from_territory_code!(territory_code, backend, options \\ [locale: Cldr.get_locale(), style: :standard]) do
module = Module.concat(backend, Territory)
module.from_territory_code!(territory_code, options)
end
@doc """
The same as `from_subdivision_code/2`, but raises an exception if it fails.
## Example
iex> #{inspect __MODULE__}.from_subdivision_code!("gbcma", TestBackend.Cldr, locale: "en")
"Cumbria"
iex> #{inspect __MODULE__}.from_subdivision_code!("gbcma", TestBackend.Cldr, locale: "pl")
"Kumbria"
"""
@spec from_subdivision_code!(binary(), Cldr.backend(), [locale: binary_tag()]) ::
binary() | no_return()
def from_subdivision_code!(
territory_code,
backend,
options \\ [locale: Cldr.get_locale()]
) do
module = Module.concat(backend, Territory)
module.from_subdivision_code!(territory_code, options)
end
@doc """
Localized string for the given `LanguageTag.t`.
Returns `{:ok, String.t}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `style` is one of those returned by `Cldr.Territory.available_styles/0`.
The current styles are `:short`, `:standard` and `:variant`.
The default is `:standard`
## Example
iex> Cldr.Territory.from_language_tag(Cldr.get_locale(TestBackend.Cldr), TestBackend.Cldr)
{:ok, "world"}
iex> Cldr.Territory.from_language_tag(Cldr.get_locale(TestBackend.Cldr), TestBackend.Cldr, [style: :short])
{:error, {Cldr.UnknownStyleError, "The style :short is unknown"}}
iex> Cldr.Territory.from_language_tag(Cldr.get_locale(TestBackend.Cldr), TestBackend.Cldr, [style: :ZZZ])
{:error, {Cldr.UnknownStyleError, "The style :ZZZ is unknown"}}
iex> Cldr.Territory.from_language_tag(Cldr.get_locale(TestBackend.Cldr), TestBackend.Cldr, [style: "ZZZ"])
{:error, {Cldr.UnknownStyleError, "The style \\"ZZZ\\" is unknown"}}
"""
@spec from_language_tag(tag(), Cldr.backend(), options()) :: {:ok, binary()} | {:error, error()}
def from_language_tag(tag, backend, options \\ [style: :standard]) do
module = Module.concat(backend, Territory)
module.from_language_tag(tag, options)
end
@doc """
The same as `from_language_tag/2`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.from_language_tag!(Cldr.get_locale(TestBackend.Cldr), TestBackend.Cldr)
"world"
"""
@spec from_language_tag!(tag(), Cldr.backend(), options()) :: binary() | no_return()
def from_language_tag!(tag, backend, options \\ [style: :standard]) do
module = Module.concat(backend, Territory)
module.from_language_tag!(tag, options)
end
@doc """
Translate a localized string from one locale to another.
Returns `{:ok, result}` if successful, otherwise `{:error, reason}`.
* `to_locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/0`
## Example
iex> Cldr.Territory.translate_territory("Reino Unido", "pt", TestBackend.Cldr)
{:ok, "United Kingdom"}
iex> Cldr.Territory.translate_territory("United Kingdom", "en", TestBackend.Cldr, "pt")
{:ok, "Reino Unido"}
iex> Cldr.Territory.translate_territory("Reino Unido", :zzz, TestBackend.Cldr)
{:error, {Cldr.UnknownLocaleError, "The locale :zzz is not known."}}
iex> Cldr.Territory.translate_territory("United Kingdom", "en", TestBackend.Cldr, "zzz")
{:error, {Cldr.UnknownLocaleError, "The locale \\"zzz\\" is not known."}}
"""
@spec translate_territory(binary(), binary_tag(), Cldr.backend(), binary_tag(), atom()) :: {:ok, binary()} | {:error, error()}
def translate_territory(localized_string, from_locale, backend, to_locale, style) do
module = Module.concat(backend, Territory)
module.translate_territory(localized_string, from_locale, to_locale, style)
end
def translate_territory(localized_string, from_locale) do
backend = Cldr.default_backend!()
module = Module.concat(backend, Territory)
module.translate_territory(localized_string, from_locale, backend.get_locale(), :standard)
end
def translate_territory(localized_string, from_locale, backend) do
module = Module.concat(backend, Territory)
module.translate_territory(localized_string, from_locale, backend.get_locale(), :standard)
end
def translate_territory(localized_string, from_locale, backend, to_locale) do
module = Module.concat(backend, Territory)
module.translate_territory(localized_string, from_locale, to_locale, :standard)
end
@doc """
Translate a localized string from one locale to another.
Returns `{:ok, result}` if successful, otherwise `{:error, reason}`.
* `to_locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/0`
## Example
iex> #{inspect __MODULE__}.translate_subdivision("Cumbria", "en", TestBackend.Cldr, "pl")
{:ok, "Kumbria"}
iex> #{inspect __MODULE__}.translate_subdivision("Cumbria", "en", TestBackend.Cldr, "bs")
{:error, {Cldr.UnknownSubdivisionError, "The locale \\"bs\\" has no translation for \\"gbcma\\"."}}
iex> #{inspect __MODULE__}.translate_subdivision("Cumbria", :zzz, TestBackend.Cldr)
{:error, {Cldr.UnknownLocaleError, "The locale :zzz is not known."}}
iex> #{inspect __MODULE__}.translate_subdivision("Cumbria", "en", TestBackend.Cldr, "zzz")
{:error, {Cldr.UnknownLocaleError, "The locale \\"zzz\\" is not known."}}
"""
@spec translate_subdivision(binary(), binary_tag(), Cldr.backend(), binary_tag()) ::
{:ok, binary()} | {:error, error()}
def translate_subdivision(localized_string, from_locale, backend, to_locale \\ Cldr.get_locale()) do
module = Module.concat(backend, Territory)
module.translate_subdivision(localized_string, from_locale, to_locale)
end
@doc """
The same as `translate_territory/3`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.translate_territory!("Reino Unido", "pt", TestBackend.Cldr)
"United Kingdom"
iex> Cldr.Territory.translate_territory!("United Kingdom", "en", TestBackend.Cldr, "pt")
"Reino Unido"
"""
@spec translate_territory!(binary(), binary_tag(), Cldr.backend(), binary_tag()) :: binary() | no_return()
def translate_territory!(localized_string, from_locale, backend, to_locale, style) do
module = Module.concat(backend, Territory)
module.translate_territory!(localized_string, from_locale, to_locale, style)
end
def translate_territory!(localized_string, from_locale) do
backend = Cldr.default_backend!()
module = Module.concat(backend, Territory)
module.translate_territory!(localized_string, from_locale, backend.get_locale(), :standard)
end
def translate_territory!(localized_string, from_locale, backend) do
module = Module.concat(backend, Territory)
module.translate_territory!(localized_string, from_locale, backend.get_locale(), :standard)
end
def translate_territory!(localized_string, from_locale, backend, to_locale) do
module = Module.concat(backend, Territory)
module.translate_territory!(localized_string, from_locale, to_locale, :standard)
end
@doc """
The same as `translate_subdivision/3`, but raises an exception if it fails.
## Example
iex> #{inspect __MODULE__}.translate_subdivision!("Cumbria", "en", TestBackend.Cldr, "pl")
"Kumbria"
#=> #{inspect __MODULE__}.translate_subdivision!("Cumbria", "en", TestBackend.Cldr, "bs")
** (Cldr.UnknownSubdivisionError) The locale "bs" has no translation for "gbcma".
"""
@spec translate_subdivision!(binary(), binary_tag(), Cldr.backend(), binary_tag()) ::
binary() | no_return()
def translate_subdivision!(localized_string, from_locale, backend, to_locale \\ Cldr.get_locale()) do
module = Module.concat(backend, Territory)
module.translate_subdivision!(localized_string, from_locale, to_locale)
end
@doc """
Translate a LanguageTag.t into a localized string from one locale to another.
Returns `{:ok, result}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `locale` is any configured locale. See `Cldr.known_locale_names/1`.
The default is `Cldr.get_locale/0`
* `style` is one of those returned by `Cldr.Territory.available_styles/0`.
The current styles are `:short`, `:standard` and `:variant`.
The default is `:standard`
## Example
iex> Cldr.Territory.translate_language_tag(Cldr.get_locale(), TestBackend.Cldr)
{:ok, "world"}
iex> Cldr.Territory.translate_language_tag(Cldr.get_locale(), TestBackend.Cldr, [locale: Cldr.Locale.new!("pt", TestBackend.Cldr)])
{:ok, "Mundo"}
"""
@spec translate_language_tag(tag(), Cldr.backend(), options()) :: {:ok, binary()} | {:error, error()}
def translate_language_tag(from_locale, backend, options \\ [locale: Cldr.get_locale(), style: :standard]) do
module = Module.concat(backend, Territory)
module.translate_language_tag(from_locale, options)
end
@doc """
The same as `translate_language_tag/2`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.translate_language_tag!(Cldr.get_locale(), TestBackend.Cldr)
"world"
iex> Cldr.Territory.translate_language_tag!(Cldr.get_locale(), TestBackend.Cldr, [locale: Cldr.Locale.new!("pt", TestBackend.Cldr)])
"Mundo"
"""
@spec translate_language_tag!(tag(), Cldr.backend(), options()) :: binary() | no_return()
def translate_language_tag!(from_locale, backend, options \\ [locale: Cldr.get_locale(), style: :standard]) do
module = Module.concat(backend, Territory)
module.translate_language_tag!(from_locale, options)
end
@children Enum.flat_map(@territory_containment, fn {_, v} -> v end)
@doc """
Lists parent(s) for the given territory code.
Returns `{:ok, list}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.parent(:GB)
{:ok, [:"154", :UN]}
iex> Cldr.Territory.parent(:ZZZ)
{:error, {Cldr.UnknownTerritoryError, "The territory :ZZZ is unknown"}}
iex> Cldr.Territory.parent(Cldr.get_locale())
{:error, {Cldr.UnknownChildrenError, "The territory :\\"001\\" has no parent(s)"}}
"""
@spec parent(atom_binary_tag(), as_options()) :: {:ok, [atom_binary_charlist()]} | {:error, error()}
def parent(territory_code, opts \\ [as: :atom])
def parent(%LanguageTag{territory: territory_code}, opts), do: parent(territory_code, opts)
for code <- [:UN, :EU, :EZ] do
def parent(unquote(code), [as: :atom]), do: {:ok, [:"001"]}
def parent(unquote(code), [as: :binary]), do: {:ok, ["001"]}
def parent(unquote(code), [as: :charlist]), do: {:ok, ['001']}
end
def parent(territory_code, [as: :atom]) do
territory_code
|> Cldr.validate_territory()
|> case do
{:error, error} -> {:error, error}
{:ok, code} -> @children
|> Enum.member?(code)
|> case do
false -> {:error, {Cldr.UnknownChildrenError, "The territory #{inspect code} has no parent(s)"}}
true -> {:ok, @territory_containment
|> Enum.filter(fn({_parent, children}) -> Enum.member?(children, code) end)
|> Enum.map(fn({parent, _children}) -> parent end)
|> Enum.sort()}
end
end
end
def parent(territory_code, [as: :binary]) do
territory_code
|> parent()
|> map_binary()
end
def parent(territory_code, [as: :charlist]) do
territory_code
|> parent()
|> map_charlist()
end
@doc """
The same as `parent/2`, but raises an exception if it fails.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.parent!(:GB)
[:"154", :UN]
"""
@spec parent!(atom_binary_tag(), as_options()) :: [atom_binary_charlist()] | no_return()
def parent!(territory_code, opts \\ [as: :atom])
def parent!(%LanguageTag{territory: territory_code}, opts), do: parent!(territory_code, opts)
def parent!(territory_code, [as: :atom]) do
case parent(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
def parent!(territory_code, [as: :binary]) do
territory_code
|> parent()
|> map_binary!()
end
def parent!(territory_code, [as: :charlist]) do
territory_code
|> parent()
|> map_charlist!()
end
@parents (for {k, _v} <- @territory_containment, do: k)
@doc """
Lists the children for the given territory code.
Returns `{:ok, list}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.children(:EU)
{:ok,
[:AT, :BE, :CY, :CZ, :DE, :DK, :EE, :ES, :FI, :FR, :GR, :HR, :HU, :IE,
:IT, :LT, :LU, :LV, :MT, :NL, :PL, :PT, :SE, :SI, :SK, :BG, :RO]}
iex> Cldr.Territory.children(:ZZZ)
{:error, {Cldr.UnknownTerritoryError, "The territory :ZZZ is unknown"}}
iex> Cldr.Territory.children(:GB)
{:error, {Cldr.UnknownParentError, "The territory :GB has no children"}}
"""
@spec children(atom_binary_tag(), as_options()) :: {:ok, [atom_binary_charlist()]} | {:error, error()}
def children(territory_code, opts \\ [as: :atom])
def children(%LanguageTag{territory: territory_code}, opts), do: children(territory_code, opts)
def children(territory_code, [as: :atom]) do
territory_code
|> Cldr.validate_territory()
|> case do
{:error, error} -> {:error, error}
{:ok, code} -> @parents
|> Enum.member?(code)
|> case do
false -> {:error, {Cldr.UnknownParentError, "The territory #{inspect code} has no children"}}
true -> {:ok, @territory_containment[code]}
end
end
end
def children(territory_code, [as: :binary]) do
territory_code
|> children()
|> map_binary()
end
def children(territory_code, [as: :charlist]) do
territory_code
|> children()
|> map_charlist()
end
@doc """
The same as `children/2`, but raises an exception if it fails.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.children!(:EU)
[:AT, :BE, :CY, :CZ, :DE, :DK, :EE, :ES, :FI, :FR, :GR, :HR, :HU, :IE, :IT,
:LT, :LU, :LV, :MT, :NL, :PL, :PT, :SE, :SI, :SK, :BG, :RO]
"""
@spec children!(atom_binary_tag(), as_options()) :: [atom_binary_charlist()] | no_return()
def children!(territory_code, opts \\ [as: :atom])
def children!(%LanguageTag{territory: territory_code}, opts), do: children!(territory_code, opts)
def children!(territory_code, [as: :atom]) do
case children(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
def children!(territory_code, [as: :binary]) do
territory_code
|> children()
|> map_binary!()
end
def children!(territory_code, [as: :charlist]) do
territory_code
|> children()
|> map_charlist!()
end
@doc """
Checks the relationship between two territories, where the first argument is the `parent` and the second the `child`.
Returns `true` if successful, otherwise `false`.
## Example
iex> Cldr.Territory.contains?(:EU, :DK)
true
iex> Cldr.Territory.contains?(:DK, :EU)
false
"""
@spec contains?(atom_tag(), atom_tag()) :: boolean()
def contains?(%LanguageTag{territory: parent}, child), do: contains?(parent, child)
def contains?(parent, %LanguageTag{territory: child}), do: contains?(parent, child)
def contains?(parent, child) do
@parents
|> Enum.member?(parent)
|> case do
false -> false
true -> Enum.member?(@territory_containment[parent], child)
end
end
@doc """
Maps territory info for the given territory code.
Returns `{:ok, map}` if successful, otherwise `{:error, reason}`.
## Example
iex> Cldr.Territory.info(:GB)
{:ok,
%{
currency: [GBP: %{from: ~D[1694-07-27]}],
gdp: 2925000000000,
language_population: %{
"bn" => %{population_percent: 0.67},
"cy" => %{official_status: "official_regional", population_percent: 0.77},
"de" => %{population_percent: 6},
"el" => %{population_percent: 0.33},
"en" => %{official_status: "official", population_percent: 99},
"fr" => %{population_percent: 19},
"ga" => %{official_status: "official_regional", population_percent: 0.026},
"gd" => %{
official_status: "official_regional",
population_percent: 0.099,
writing_percent: 5
},
"it" => %{population_percent: 0.33},
"ks" => %{population_percent: 0.19},
"kw" => %{population_percent: 0.003},
"ml" => %{population_percent: 0.035},
"pa" => %{population_percent: 0.79},
"sco" => %{population_percent: 2.7, writing_percent: 5},
"syl" => %{population_percent: 0.51},
"yi" => %{population_percent: 0.049},
"zh-Hant" => %{population_percent: 0.54}
},
literacy_percent: 99,
measurement_system: %{
default: :uksystem,
paper_size: :a4,
temperature: :uksystem
},
population: 65761100
}}
"""
@spec info(atom_tag()) :: {:ok, map()} | {:error, error()}
def info(%LanguageTag{territory: territory_code}), do: info(territory_code)
def info(territory_code) do
territory_code
|> Cldr.validate_territory()
|> case do
{:error, reason} -> {:error, reason}
{:ok, code} -> {:ok, @territory_info[code]}
end
end
@doc """
The same as `info/1`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.info!(:GB)
%{
currency: [GBP: %{from: ~D[1694-07-27]}],
gdp: 2925000000000,
language_population: %{
"bn" => %{population_percent: 0.67},
"cy" => %{official_status: "official_regional", population_percent: 0.77},
"de" => %{population_percent: 6},
"el" => %{population_percent: 0.33},
"en" => %{official_status: "official", population_percent: 99},
"fr" => %{population_percent: 19},
"ga" => %{official_status: "official_regional", population_percent: 0.026},
"gd" => %{
official_status: "official_regional",
population_percent: 0.099,
writing_percent: 5
},
"it" => %{population_percent: 0.33},
"ks" => %{population_percent: 0.19},
"kw" => %{population_percent: 0.003},
"ml" => %{population_percent: 0.035},
"pa" => %{population_percent: 0.79},
"sco" => %{population_percent: 2.7, writing_percent: 5},
"syl" => %{population_percent: 0.51},
"yi" => %{population_percent: 0.049},
"zh-Hant" => %{population_percent: 0.54}
},
literacy_percent: 99,
measurement_system: %{
default: :uksystem,
paper_size: :a4,
temperature: :uksystem
},
population: 65761100
}
"""
@spec info!(atom_tag()) :: map() | no_return()
def info!(%LanguageTag{territory: territory_code}), do: info!(territory_code)
def info!(territory_code) do
case info(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
@doc """
Unicode flag for the given territory code.
Returns `{:ok, flag}` if successful, otherwise `{:error, reason}`.
## Example
iex> Cldr.Territory.to_unicode_flag(:US)
{:ok, "🇺🇸"}
iex> Cldr.Territory.to_unicode_flag(:EZ)
{:error, {Cldr.UnknownFlagError, "The territory :EZ has no flag"}}
"""
@spec to_unicode_flag(atom_binary_tag() | {:ok, atom()} | {:error, error()}) :: {:ok, binary()} | {:error, error()}
def to_unicode_flag(%LanguageTag{territory: territory_code}), do: to_unicode_flag(territory_code)
def to_unicode_flag({:error, reason}), do: {:error, reason}
def to_unicode_flag({:ok, territory_code}) do
case flag_exists?(territory_code) do
false -> {:error, {Cldr.UnknownFlagError, "The territory #{inspect territory_code} has no flag"}}
true -> {:ok, territory_code
|> Atom.to_charlist()
|> Enum.map(&to_unicode_font/1)
|> List.to_string()}
end
end
def to_unicode_flag(territory_code), do: territory_code |> Cldr.validate_territory() |> to_unicode_flag()
@doc """
The same as `to_unicode_flag/1`, but raises an exception if it fails.
## Example
iex> Cldr.Territory.to_unicode_flag!(:US)
"🇺🇸"
"""
@spec to_unicode_flag!(atom_binary_tag()) :: binary() | no_return()
def to_unicode_flag!(%LanguageTag{territory: territory_code}), do: to_unicode_flag!(territory_code)
def to_unicode_flag!(territory_code) do
case to_unicode_flag(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
# https://en.wikipedia.org/wiki/Regional_Indicator_Symbol
defp flag_exists?(territory_code) do
:"001"
|> children!()
|> Enum.flat_map(fn c -> Enum.flat_map(children!(c), &children!/1) end)
|> Enum.concat([:EU, :UN])
|> Enum.member?(territory_code)
end
# Generates function clauses that return the Regional Indicator codepoint for A-Z
for number <- ?A..?Z do
defp to_unicode_font(unquote(number)), do: [127400 + unquote(number) - 3]
end
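# Each ASCII letter is shifted into the Regional Indicator Symbol block:
# ?A (65) maps to 65 + 127_397 = 127_462 (U+1F1E6, "🇦"), so :US yields the
# pair of codepoints rendered as the US flag.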
@doc """
A helper function to get a territory's currency code.
If a territory has multiple currencies, the oldest active currency is returned.
Returns `{:ok, code}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.to_currency_code(:US)
{:ok, :USD}
iex> Cldr.Territory.to_currency_code("cu")
{:ok, :CUP}
"""
@spec to_currency_code(atom_binary_tag(), as_options()) :: {:ok, atom_binary_charlist()} | {:error, error()}
def to_currency_code(territory_code, opts \\ [as: :atom])
def to_currency_code(%LanguageTag{territory: territory_code}, opts), do: to_currency_code(territory_code, opts)
def to_currency_code(territory_code, [as: :atom]) do
case info(territory_code) do
{:error, reason} -> {:error, reason}
{:ok, territory} -> {:ok, territory |> sort_currency() |> Kernel.hd()}
end
end
def to_currency_code(territory_code, [as: :binary]) do
territory_code
|> to_currency_code()
|> map_binary()
end
def to_currency_code(territory_code, [as: :charlist]) do
territory_code
|> to_currency_code()
|> map_charlist()
end
@doc """
The same as `to_currency_code/2`, but raises an exception if it fails.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.to_currency_code!(:US)
:USD
iex> Cldr.Territory.to_currency_code!(:US, as: :charlist)
'USD'
iex> Cldr.Territory.to_currency_code!("PS")
:ILS
iex> Cldr.Territory.to_currency_code!("PS", as: :binary)
"ILS"
"""
@spec to_currency_code!(atom_binary_tag(), as_options()) :: atom_binary_charlist() | no_return()
def to_currency_code!(territory_code, opts \\ [as: :atom])
def to_currency_code!(%LanguageTag{territory: territory_code}, opts), do: to_currency_code!(territory_code, opts)
def to_currency_code!(territory_code, [as: :atom]) do
case to_currency_code(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
def to_currency_code!(territory_code, [as: :binary]) do
territory_code
|> to_currency_code()
|> map_binary!()
end
def to_currency_code!(territory_code, [as: :charlist]) do
territory_code
|> to_currency_code()
|> map_charlist!()
end
@doc """
A helper function to get a territory's currency codes.
Returns `{:ok, list}` if successful, otherwise `{:error, reason}`.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.to_currency_codes(:US)
{:ok, [:USD]}
iex> Cldr.Territory.to_currency_codes("cu")
{:ok, [:CUP, :CUC]}
"""
@spec to_currency_codes(atom_binary_tag(), as_options()) :: {:ok, [atom_binary_charlist()]} | {:error, error()}
def to_currency_codes(territory_code, opts \\ [as: :atom])
def to_currency_codes(territory_code, [as: :atom]) do
case info(territory_code) do
{:error, reason} -> {:error, reason}
{:ok, territory} -> {:ok, sort_currency(territory)}
end
end
def to_currency_codes(territory_code, [as: :binary]) do
territory_code
|> to_currency_codes()
|> map_binary()
end
def to_currency_codes(territory_code, [as: :charlist]) do
territory_code
|> to_currency_codes()
|> map_charlist()
end
@doc """
The same as `to_currency_codes/2`, but raises an exception if it fails.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
iex> Cldr.Territory.to_currency_codes!(:US)
[:USD]
iex> Cldr.Territory.to_currency_codes!(:US, as: :charlist)
['USD']
iex> Cldr.Territory.to_currency_codes!("PS")
[:ILS, :JOD]
iex> Cldr.Territory.to_currency_codes!("PS", as: :binary)
["ILS", "JOD"]
"""
@spec to_currency_codes!(atom_binary_tag(), as_options()) :: [atom_binary_charlist()] | no_return()
def to_currency_codes!(territory_code, opts \\ [as: :atom])
def to_currency_codes!(territory_code, [as: :atom]) do
case to_currency_codes(territory_code) do
{:error, {exception, msg}} -> raise exception, msg
{:ok, result} -> result
end
end
def to_currency_codes!(territory_code, [as: :binary]) do
territory_code
|> to_currency_codes()
|> map_binary!()
end
def to_currency_codes!(territory_code, [as: :charlist]) do
territory_code
|> to_currency_codes()
|> map_charlist!()
end
defp sort_currency(%{currency: currency}) do
currency
|> Enum.filter(fn {_key, meta} -> !Map.has_key?(meta, :tender) and !Map.has_key?(meta, :to) end)
|> Enum.sort(&(elem(&1, 1).from < elem(&2, 1).from))
|> Keyword.keys()
end
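# sort_currency/1 keeps only currencies without a :tender or :to key (i.e.
# still-current legal tender), ordered by :from date, oldest first.
# Hypothetical shape:
#
#   sort_currency(%{currency: [CUC: %{from: ~D[1994-01-01]},
#                              CUP: %{from: ~D[1859-01-01]},
#                              USD: %{from: ~D[1899-01-01], to: ~D[1959-01-01]}]})
#   #=> [:CUP, :CUC]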
@regions ["005", "011", "013", "014", "015", "017",
"018", "021", "029", "030", "034", "035",
"039", "053", "054", "057", "061", "143",
"145", "151", "154", "155"]
@doc """
Returns a list of country codes.
* `options` are:
* `as: :atom`
* `as: :binary`
* `as: :charlist`
## Example
=> Cldr.Territory.country_codes()
[:AD, :AE, :AF, :AG, :AI, :AL, :AM, :AO, :AR, :AS, :AT, :AU, :AW,
:AX, :AZ, :BA, :BB, :BD, :BE, :BF, :BG, :BH, :BI, :BJ, :BL, :BM,
:BN, :BO, :BQ, :BR, :BS, :BT, :BV, :BW, :BY, :BZ, :CA, :CC, :CD,
:CF, :CG, :CH, :CI, :CK, :CL, :CM, :CN, :CO, :CR, :CU, ...]
"""
@spec country_codes(as_options()) :: [atom_binary_charlist()]
def country_codes(opts \\ [as: :atom])
def country_codes([as: :atom]) do
@regions
|> Enum.flat_map(&children!/1)
|> Enum.sort()
end
def country_codes([as: :binary]), do: map_binary(country_codes())
def country_codes([as: :charlist]), do: map_charlist(country_codes())
defp map_binary({:error, reason}), do: {:error, reason}
defp map_binary({:ok, result}), do: {:ok, map_binary(result)}
defp map_binary(result) when is_list(result) do
Enum.map(result, &to_string/1)
end
defp map_binary(result) when is_atom(result), do: to_string(result)
defp map_binary!({:error, {exception, reason}}), do: raise exception, reason
defp map_binary!({:ok, result}), do: map_binary(result)
defp map_charlist({:error, reason}), do: {:error, reason}
defp map_charlist({:ok, result}), do: {:ok, map_charlist(result)}
defp map_charlist(result) when is_list(result) do
Enum.map(result, &to_charlist/1)
end
defp map_charlist(result) when is_atom(result), do: to_charlist(result)
defp map_charlist!({:error, {exception, reason}}), do: raise exception, reason
defp map_charlist!({:ok, result}), do: map_charlist(result)
@doc false
def normalize_name(string) do
string
|> String.downcase()
|> String.replace(" & ", "")
|> String.replace(".", "")
|> String.replace(~r/(\s)+/u, "\\1")
end
end
|
lib/cldr/territory.ex
| 0.908448
| 0.468183
|
territory.ex
|
starcoder
|
defmodule Absinthe.Relay.Connection.Notation do
@moduledoc """
Macros used to define Connection-related schema entities
See `Absinthe.Relay.Connection` for more information.
If you wish to use this module on its own without `use Absinthe.Relay` you
need to include
```
@pipeline_modifier Absinthe.Relay.Schema
```
in your root schema module.
"""
alias Absinthe.Blueprint.Schema
@naming_attrs [:node_type, :non_null, :non_null_connection, :non_null_edges, :non_null_edge, :connection]
defmodule Naming do
@moduledoc false
defstruct base_identifier: nil,
node_type_identifier: nil,
connection_type_identifier: nil,
edge_type_identifier: nil,
non_null_connection: false,
non_null_edges: false,
non_null_edge: false,
attrs: []
def from_attrs!(attrs) do
node_type_identifier =
attrs[:node_type] ||
raise(
"Must provide a `:node_type' option (an optional `:connection` option is also supported)"
)
base_identifier = attrs[:connection] || node_type_identifier
non_null_connection = attrs[:non_null_connection] || false
non_null_edges = attrs[:non_null_edges] || attrs[:non_null] || false
non_null_edge = attrs[:non_null_edge] || attrs[:non_null] || false
%__MODULE__{
node_type_identifier: node_type_identifier,
base_identifier: base_identifier,
connection_type_identifier: ident(base_identifier, :connection),
edge_type_identifier: ident(base_identifier, :edge),
non_null_connection: non_null_connection,
non_null_edges: non_null_edges,
non_null_edge: non_null_edge,
attrs: [
node_type: node_type_identifier,
connection: base_identifier,
non_null_edges: non_null_edges,
non_null_edge: non_null_edge
]
}
end
defp ident(base, category) do
:"#{base}_#{category}"
end
end
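# Illustrative naming derivation:
#
#   Naming.from_attrs!(node_type: :pet)
#   #=> %Naming{node_type_identifier: :pet, base_identifier: :pet,
#   #           connection_type_identifier: :pet_connection,
#   #           edge_type_identifier: :pet_edge, ...}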
@doc """
Define a connection type for a given node type.
## Examples
A basic connection for a node type, `:pet`. This will generate simple
`:pet_connection` and `:pet_edge` types for you:
```
connection node_type: :pet
```
You can provide a custom name for the connection type (just don't include the
word "connection"). You must still provide the `:node_type`. You can create as
many different connections to a node type as you want.
This example will create a connection type, `:favorite_pets_connection`, and
an edge type, `:favorite_pets_edge`:
```
connection :favorite_pets, node_type: :pet
```
You can customize the connection object just like any other `object`:
```
connection :favorite_pets, node_type: :pet do
field :total_age, :float do
resolve fn
_, %{source: conn} ->
sum = conn.edges
|> Enum.map(fn edge -> edge.node.age end)
|> Enum.sum
{:ok, sum}
end
end
edge do
# ...
end
end
```
Just remember that if you use the block form of `connection`, you must call
the `edge` macro within the block to make sure the edge type is generated.
See the `edge` macro below for more information.
"""
defmacro connection({:field, _, [identifier, attrs]}, do: block) when is_list(attrs) do
do_connection_field(identifier, attrs, block)
end
defmacro connection(attrs, do: block) do
naming = Naming.from_attrs!(attrs)
do_connection_definition(naming, attrs, block)
end
defmacro connection(identifier, attrs) do
naming = Naming.from_attrs!(attrs |> Keyword.put(:connection, identifier))
do_connection_definition(naming, attrs, [])
end
defmacro connection(attrs) do
naming = Naming.from_attrs!(attrs)
do_connection_definition(naming, attrs, [])
end
defmacro connection(identifier, attrs, do: block) do
naming = Naming.from_attrs!(attrs |> Keyword.put(:connection, identifier))
do_connection_definition(naming, attrs, block)
end
defp do_connection_field(identifier, attrs, block) do
paginate = Keyword.get(attrs, :paginate, :both)
naming = Naming.from_attrs!(attrs)
type = build_connection_field_type(naming)
field_attrs =
attrs
|> Keyword.drop([:paginate] ++ @naming_attrs)
|> Keyword.put(:type, type)
quote do
field unquote(identifier), unquote(field_attrs) do
private(:absinthe_relay, {:paginate, unquote(paginate)}, {:fill, unquote(__MODULE__)})
unquote(block)
end
end
end
defp do_connection_definition(naming, attrs, block) do
identifier = naming.connection_type_identifier
attrs = Keyword.drop(attrs, @naming_attrs)
block = name_edge(block, naming.attrs)
edge_field = build_edge_type(naming)
quote do
object unquote(identifier), unquote(attrs) do
private(
:absinthe_relay,
{:connection, unquote(naming.attrs)},
{:fill, unquote(__MODULE__)}
)
field(:page_info, type: non_null(:page_info))
field(:edges, type: unquote(edge_field))
unquote(block)
end
end
end
defp build_connection_field_type(%{non_null_connection: true} = naming) do
quote do
non_null(unquote(naming.connection_type_identifier))
end
end
defp build_connection_field_type(%{non_null_connection: false} = naming) do
quote do
unquote(naming.connection_type_identifier)
end
end
defp build_edge_type(%{non_null_edge: true, non_null_edges: true} = naming) do
quote do
non_null(list_of(non_null(unquote(naming.edge_type_identifier))))
end
end
defp build_edge_type(%{non_null_edge: true} = naming) do
quote do
list_of(non_null(unquote(naming.edge_type_identifier)))
end
end
defp build_edge_type(%{non_null_edges: true} = naming) do
quote do
non_null(list_of(unquote(naming.edge_type_identifier)))
end
end
defp build_edge_type(naming) do
quote do
list_of(unquote(naming.edge_type_identifier))
end
end
defp name_edge([], _), do: []
defp name_edge({:edge, meta, [[do: block]]}, conn_attrs) do
{:edge, meta, [conn_attrs, [do: block]]}
end
defp name_edge({:__block__, meta, content}, conn_attrs) do
content =
Enum.map(content, fn
{:edge, meta, [[do: block]]} ->
{:edge, meta, [conn_attrs, [do: block]]}
{:edge, meta, [attrs, [do: block]]} ->
{:edge, meta, [conn_attrs ++ attrs, [do: block]]}
node ->
node
end)
{:__block__, meta, content}
end
@doc """
Customize the edge type.
## Examples
```
connection node_type: :pet do
# ...
edge do
field :node_name_backwards, :string do
resolve fn
_, %{source: edge} ->
{:ok, edge.node.name |> String.reverse}
end
end
end
end
```
"""
defmacro edge(attrs, do: block) do
naming = Naming.from_attrs!(attrs)
attrs = Keyword.drop(attrs, @naming_attrs)
quote do
Absinthe.Schema.Notation.stash()
object unquote(naming.edge_type_identifier), unquote(attrs) do
private(:absinthe_relay, {:edge, unquote(naming.attrs)}, {:fill, unquote(__MODULE__)})
unquote(block)
end
Absinthe.Schema.Notation.pop()
end
end
def additional_types({:connection, attrs}, _) do
naming = Naming.from_attrs!(attrs)
identifier = naming.edge_type_identifier
%Schema.ObjectTypeDefinition{
name: identifier |> Atom.to_string() |> Macro.camelize(),
identifier: identifier,
module: __MODULE__,
__private__: [absinthe_relay: [{{:edge, attrs}, {:fill, __MODULE__}}]],
__reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
}
end
def additional_types(_, _), do: []
def fillout({:paginate, type}, node) do
Map.update!(node, :arguments, fn arguments ->
type
|> paginate_args()
|> Enum.map(fn {id, type} -> build_arg(id, type) end)
|> put_uniq(arguments)
end)
end
# @desc "The item at the end of the edge"
# field(:node, unquote(naming.node_type_identifier))
# @desc "A cursor for use in pagination"
# field(:cursor, non_null(:string))
def fillout({:edge, attrs}, node) do
naming = Naming.from_attrs!(attrs)
Map.update!(node, :fields, fn fields ->
naming.node_type_identifier
|> edge_fields
|> put_uniq(fields)
end)
end
def fillout(_, node) do
node
end
defp put_uniq(new, prior) do
existing = MapSet.new(prior, & &1.identifier)
new
|> Enum.filter(&(!(&1.identifier in existing)))
|> Enum.concat(prior)
end
defp edge_fields(node_type) do
[
%Schema.FieldDefinition{
name: "node",
identifier: :node,
type: node_type,
module: __MODULE__,
__reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
},
%Schema.FieldDefinition{
name: "cursor",
identifier: :cursor,
type: :string,
module: __MODULE__,
__reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
}
]
end
defp paginate_args(:forward) do
[after: :string, first: :integer]
end
defp paginate_args(:backward) do
[before: :string, last: :integer]
end
defp paginate_args(:both) do
paginate_args(:forward) ++ paginate_args(:backward)
end
defp build_arg(id, type) do
%Schema.InputValueDefinition{
name: id |> Atom.to_string(),
identifier: id,
type: type,
module: __MODULE__,
__reference__: Absinthe.Schema.Notation.build_reference(__ENV__)
}
end
end
|
lib/absinthe/relay/connection/notation.ex
| 0.885291
| 0.673514
|
notation.ex
|
starcoder
|
defmodule Tirexs.Mapping do
@moduledoc """
Provides DSL-like macros for defining indices.
A mapping can be defined alongside `settings`, or on its own with just `mappings`.
Mappings and Settings definition:
index = [index: "articles", type: "article"]
settings do
analysis do
analyzer "autocomplete_analyzer",
[
filter: ["lowercase", "asciifolding", "edge_ngram"],
tokenizer: "whitespace"
]
filter "edge_ngram", [type: "edgeNGram", min_gram: 1, max_gram: 15]
end
end
mappings dynamic: false do
indexes "country", type: "string"
indexes "city", type: "string"
indexes "suburb", type: "string"
indexes "road", type: "string"
indexes "postcode", type: "string", index: "not_analyzed"
indexes "housenumber", type: "string", index: "not_analyzed"
indexes "coordinates", type: "geo_point"
indexes "full_address", type: "string", analyzer: "autocomplete_analyzer"
end
Tirexs.Mapping.create_resource(index)
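
Nested objects can be declared by giving `indexes` a block; the nested
fields become `properties` of an `object`-typed field (a sketch):

    mappings do
      indexes "author" do
        indexes "name", type: "string"
      end
    end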
"""
use Tirexs.DSL.Logic
@doc false
defmacro __using__(_) do
quote do
import unquote(Tirexs.Index.Settings), only: :macros
import unquote(__MODULE__), only: :macros
end
end
@doc false
defmacro mappings(params, [do: block]) do
mappings = Keyword.merge(params, [properties: extract(block)])
quote_mappings(mappings)
end
@doc false
defmacro mappings([do: block]) do
mappings = [properties: extract(block)]
quote_mappings(mappings)
end
defp quote_mappings(mappings) do
quote do
var!(index) = var!(index) ++ [mapping: unquote(mappings)]
end
end
alias Tirexs.{Resources, HTTP}
@doc false
def transpose(block) do
case block do
{:indexes, _, [params]} -> indexes(params[:do])
{:indexes, _, options} -> indexes(options)
{:index, _, [params]} -> indexes(params[:do])
{:index, _, options} -> indexes(options)
end
end
@doc false
def indexes(options) do
case options do
[name, options] ->
if options[:do] != nil do
block = options
options = [type: "object"]
[ {to_atom(name), options ++ [properties: extract(block[:do])]} ]
else
[ {to_atom(name), options} ]
end
[name, options, block] ->
[ {to_atom(name), options ++ [properties: extract(block[:do])]} ]
end
end
@doc false
def create_resource(definition, uri \\ Tirexs.get_uri_env()) do
cond do
definition[:settings] ->
body = to_resource_json(definition)
HTTP.put("#{definition[:index]}", uri, body)
definition[:type] ->
create_resource_settings(definition, uri)
body = to_resource_json(definition)
Resources.bump!(body, uri)._mapping(definition[:index], definition[:type])
true ->
body = to_resource_json(definition, definition[:index])
Resources.bump!(body, uri)._mapping(definition[:index])
end
end
@doc false
def create_resource_settings(definition, uri \\ Tirexs.get_uri_env()) do
unless Resources.exists?(definition[:index], uri) do
HTTP.put(definition[:index], uri)
end
end
@doc false
def to_resource_json(definition) do
to_resource_json(definition, definition[:type])
end
@doc false
def to_resource_json(definition, type) do
# definition w/ mappings and settings
if definition[:settings] != nil do
[ {:mappings, [{to_atom(type), definition[:mapping]}]}, {:settings, definition[:settings]} ]
# definition just only w/ mapping
else
[ {to_atom(type), definition[:mapping]} ]
end
end
end
|
lib/tirexs/mapping.ex
| 0.682997
| 0.510741
|
mapping.ex
|
starcoder
|
defmodule Runlet.Cmd.Query do
require Logger
@moduledoc "Query a riemann server"
defstruct url: "",
host: "",
port: 80,
query: "",
retry: 3_000,
conn: nil,
ref: nil,
m: nil
@type t :: %__MODULE__{
url: String.t(),
host: String.t(),
port: non_neg_integer,
query: String.t(),
retry: non_neg_integer,
conn: pid | nil,
ref: reference | nil,
m: reference | nil
}
@riemann_url "/event/index?query="
@riemann_host "localhost"
@riemann_port "8080"
defp riemann_url,
do: Runlet.Config.get(:runlet, :riemann_url, @riemann_url)
defp riemann_host,
do: Runlet.Config.get(:runlet, :riemann_host, @riemann_host)
defp riemann_port,
do:
Runlet.Config.get(:runlet, :riemann_port, @riemann_port)
|> String.to_integer()
defp riemann_event, do: Runlet.Config.get(:runlet, :riemann_event, [])
defp riemann_retry_interval,
do:
Runlet.Config.get(:runlet, :riemann_retry_interval, "3000")
|> String.to_integer()
@doc """
Filters events from a riemann server using the Riemann query
language. The query must be quoted as a string:
'state = "ok"'
Examples of Queries:
# Simple equality
state = "ok"
# Wildcards
(service =~ "disk%") or
(state != "critical" and host =~ "%.trioptimum.com")
# Standard operator precedence applies
metric_f > 2.0 and not host = nil
# Anything with a tag "product"
tagged "product"
# All states
true
Examples from the test suite:
https://github.com/riemann/riemann/blob/master/test/riemann/query_test.clj
Query Grammar:
https://github.com/riemann/riemann/blob/master/resources/query.g4
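Example usage (a sketch; assumes a reachable riemann server configured
via the `:runlet` application environment):

    ~s(state = "ok")
    |> Runlet.Cmd.Query.exec()
    |> Enum.take(1)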
"""
@spec exec(String.t()) :: Enumerable.t()
def exec(q) do
exec(q, riemann_event())
end
@doc false
@spec exec(String.t(), [atom]) :: Enumerable.t()
def exec(q, structs) do
cfg = %Runlet.Cmd.Query{
host: riemann_host(),
port: riemann_port(),
url: riemann_url(),
retry: riemann_retry_interval(),
query: q
}
startfun = fn ->
open(cfg)
end
resourcefun = fn %Runlet.Cmd.Query{conn: conn, ref: ref, m: m} = state ->
receive do
{:gun_sse, ^conn, ^ref, %{event_type: "message", data: data}} ->
case service(data, structs) do
{:ok, e} ->
{[%Runlet.Event{query: q, event: e}], state}
{:error, error} ->
Logger.error(%{json_parse: error, query: q, data: data})
{[], state}
end
{:gun_sse, ^conn, ^ref, event} ->
Logger.info(%{gun_sse: event})
{[], state}
{:gun_sse, conn, _, _} ->
:gun.close(conn)
{[], state}
{:gun_error, ^conn, ^ref, reason} ->
Logger.error(%{gun_error: reason})
Process.demonitor(m, [:flush])
close(state)
t = open(state)
{[], t}
{:gun_error, conn, _, _} ->
:gun.close(conn)
{[], state}
{:gun_error, ^conn, reason} ->
Logger.error(%{gun_error: reason})
Process.demonitor(m, [:flush])
close(state)
t = open(state)
{[], t}
{:gun_error, conn, _} ->
:gun.close(conn)
{[], state}
{:gun_down, conn, _, reason, streams, _} ->
Logger.info(%{gun_down: reason, streams: streams})
:gun.close(conn)
{[], state}
{:gun_up, _, _} ->
Logger.info(%{gun_up: "reconnecting"})
t = get(state)
{[], t}
{:DOWN, ^m, :process, _, _} ->
Logger.info(%{down: "reconnecting"})
close(state)
t = open(state)
{[], t}
{:DOWN, _, :process, _, _} ->
{[], state}
{:runlet_stdin, stdin} ->
{[
%Runlet.Event{
query: q,
event: %Runlet.Event.Stdin{description: "#{stdin}"}
}
], state}
{:runlet_stdout, stdout} ->
{[
%Runlet.Event{
query: q,
event: %Runlet.Event.Stdout{description: "#{stdout}"}
}
], state}
{:runlet_signal, "SIGHUP"} ->
Process.demonitor(m, [:flush])
close(%{state | retry: 0})
t = open(state)
{[
%Runlet.Event{
query: q,
event: %Runlet.Event.Signal{description: "SIGHUP: reconnecting"}
}
], t}
{:runlet_signal, signal} ->
{[
%Runlet.Event{
query: q,
event: %Runlet.Event.Signal{description: signal}
}
], state}
:runlet_exit ->
{:halt, state}
# unhandled ->
# Logger.info(%{unhandled_resource: unhandled})
# {[], state}
end
end
endfun = fn %Runlet.Cmd.Query{conn: conn} ->
:gun.close(conn)
end
Stream.resource(
startfun,
resourcefun,
endfun
)
end
@spec open(t) :: t
defp open(%Runlet.Cmd.Query{host: host, port: port, retry: retry} = state) do
opt = %{
protocols: [:http],
http_opts: %{content_handlers: [:gun_sse_h, :gun_data_h]},
connect_timeout: retry,
retry: 3,
retry_timeout: retry
}
result = :gun.open(String.to_charlist(host), port, opt)
case result do
{:ok, conn} ->
m = Process.monitor(conn)
case :gun.await_up(conn, m) do
{:ok, _} ->
get(%{state | conn: conn, m: m})
{:error, error} ->
Process.demonitor(m, [:flush])
Logger.error(%{gun_await_up: error})
close(state)
open(state)
end
{:error, error} ->
Logger.info(%{gun_open: error})
:timer.sleep(retry)
open(state)
end
end
@spec close(t) :: :ok
defp close(%Runlet.Cmd.Query{conn: conn, retry: retry}) do
:gun.close(conn)
:timer.sleep(retry)
end
@spec get(t) :: t
defp get(
%Runlet.Cmd.Query{
url: url,
query: query0,
m: m,
conn: conn
} = state0
) do
query =
query0
|> URI.encode(&URI.char_unreserved?/1)
ref =
:gun.get(conn, String.to_charlist(url <> query), [
{"accept", "text/event-stream"}
])
state = %{state0 | ref: ref}
response =
receive do
{:gun_response, ^conn, ^ref, :nofin, 200, _} ->
{:ok, state}
{:gun_response, ^conn, ^ref, :nofin, 500, _} ->
:data_error
{:gun_response, ^conn, ^ref, is_fin, status, headers} ->
{:open, is_fin, status, headers}
after
5000 ->
{:error, :timeout}
end
case response do
{:ok, n} ->
n
{:error, _} = error ->
Process.demonitor(m, [:flush])
Logger.info(%{gun_get: error})
close(state)
open(state)
{:open, is_fin, status, headers} ->
Process.demonitor(m, [:flush])
Logger.info(%{
fin: is_fin,
status: status,
headers: headers
})
close(state)
open(state)
:data_error ->
receive do
{:gun_data, ^conn, ^ref, :fin,
"no viable alternative at input" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, "token recognition error at:" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, "mismatched input" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, "extraneous input" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, "missing" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, "parse error:" <> _ = t} ->
parse_error(state, t)
{:gun_data, ^conn, ^ref, :fin, t} ->
Logger.info(%{fin: t})
Process.demonitor(m, [:flush])
close(state)
open(state)
{:gun_error, ^conn, ^ref, reason} ->
Logger.info(%{gun_error: reason})
Process.demonitor(m, [:flush])
close(state)
open(state)
{:gun_error, ^conn, reason} ->
Logger.info(%{gun_error: reason})
Process.demonitor(m, [:flush])
close(state)
open(state)
end
end
end
defp parse_error(state, body) do
Logger.info(%{parse_error: state, body: body})
Kernel.send(self(), {:runlet_stdout, "query error: " <> body})
Kernel.send(self(), :runlet_exit)
Process.demonitor(state.m, [:flush])
state
end
defp service(event, []) do
Poison.decode(event)
end
defp service(event, [struct | structs]) do
e =
case struct do
{as, n} ->
Poison.decode(
event,
as: struct(as, Enum.map(n, fn {k, v} -> {k, struct(v)} end))
)
as ->
Poison.decode(event, as: struct(as))
end
case e do
{:ok, t} ->
case Vex.valid?(t) do
true -> e
false -> service(event, structs)
end
{:error, _} ->
service(event, structs)
end
end
end
|
lib/runlet/cmd/query.ex
| 0.730194
| 0.427456
|
query.ex
|
starcoder
|
defmodule Blockchain.Blocktree do
@moduledoc """
Blocktree provides functions for adding blocks to the
overall blocktree and forming a consistent blockchain.
"""
defmodule InvalidBlockError do
defexception [:message]
end
alias Blockchain.Block
alias Blockchain.Chain
alias Blockchain.Genesis
alias MerklePatriciaTree.TrieStorage
defstruct best_block: nil
@type t :: %__MODULE__{best_block: Block.t() | nil}
@doc """
Creates a new empty blocktree.
"""
@spec new_tree() :: t
def new_tree() do
%__MODULE__{}
end
@doc """
Verifies a block is valid, and if so, adds it to the block tree.
This performs three steps:
1. Find the parent block
2. Verify the block against its parent block
3. If valid, put the block into our DB
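
A sketch of typical usage (`chain`, `block` and `trie` here are assumed
to come from the surrounding node setup):

    tree = Blockchain.Blocktree.new_tree()

    case Blockchain.Blocktree.verify_and_add_block(tree, chain, block, trie) do
      {:ok, {tree, trie, block_hash}} -> {tree, trie, block_hash}
      {:invalid, errors} -> {:error, errors}
    end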
"""
@spec verify_and_add_block(t, Chain.t(), Block.t(), TrieStorage.t(), boolean(), binary() | nil) ::
{:ok, {t, TrieStorage.t(), EVM.hash()}} | {:invalid, [atom()]}
def verify_and_add_block(
blocktree,
chain,
block,
trie,
do_validate \\ true,
specified_block_hash \\ nil
) do
parent =
case Block.get_parent_block(block, trie) do
:genesis -> nil
{:ok, parent} -> parent
:not_found -> :parent_not_found
end
validation =
if do_validate,
do: Block.validate(block, chain, parent, trie),
else: {:valid, trie}
with {:valid, trie} <- validation do
{:ok, {block_hash, updated_trie}} = Block.put_block(block, trie, specified_block_hash)
# Cache computed block hash
block = %{block | block_hash: block_hash}
updated_blocktree = update_best_block(blocktree, block)
{:ok, {updated_blocktree, updated_trie, block_hash}}
end
end
@spec update_best_block(t, Block.t()) :: t
def update_best_block(blocktree, block) do
best_block = blocktree.best_block
new_best_block =
if is_nil(best_block) || block.header.number > best_block.header.number ||
(block.header.number == best_block.header.number &&
block.header.difficulty > best_block.header.difficulty),
do: block,
else: best_block
# Make sure block is stored with a block hash
new_best_block_with_hash =
if new_best_block.block_hash == nil do
%{new_best_block | block_hash: Block.hash(new_best_block)}
else
new_best_block
end
%{blocktree | best_block: new_best_block_with_hash}
end
@doc """
Returns the best block in a tree, which is either the listed best block,
or it's the genesis block, which we create.
Note: we load the block by the block_hash, instead of taking it
directly from the tree.
"""
@spec get_best_block(t(), Chain.t(), TrieStorage.t()) :: {:ok, {Block.t(), TrieStorage.t()}}
def get_best_block(blocktree, chain, trie) do
if block = blocktree.best_block do
block_hash = Block.fetch_block_hash(block)
{:ok, {%{block | block_hash: block_hash}, trie}}
else
{block, new_trie} = Genesis.create_block(chain, trie)
{:ok, {_hash, trie_with_block}} = Block.put_block(block, new_trie)
{:ok, {block, trie_with_block}}
end
end
end
|
apps/blockchain/lib/blockchain/blocktree.ex
| 0.869077
| 0.63576
|
blocktree.ex
|
starcoder
|
defmodule Greetings do
@moduledoc """
This is a Greeting library
For this contrived example we have
- hello() which just returns :world
- hello(atom) which just returns atom
- hello(string) which just returns "Hello {string}"
- greet(name) which returns a "Hello $name!" (English is the default language)
- greet(name, lang = [:EN]) which returns a language specific "Hello $name!"
"""
@doc """
Super simple greeting to the world
With no arguments, it returns :world
With any atom, it returns that atom
With any string, it returns "Hello {string}"
Anything else raises an exception (ugly)
## Examples
iex> Greetings.hello()
:world
iex> Greetings.hello(:world)
:world
iex> Greetings.hello(:people)
:people
iex> Greetings.hello("you")
"Hello you"
iex> Greetings.hello("Joe")
"Hello Joe"
"""
def hello(), do: :world
def hello(myarg) when is_atom(myarg), do: myarg
def hello(myarg) when is_bitstring(myarg), do: "Hello #{myarg}"
def hello(_), do: raise "Unsupported Argument to hello()"
@doc """
Greet is our main greetings function.
With a name and a supported language, it will return a valid greeting.
With a name, it will return a valid greeting in English.
## Examples
iex> Greetings.greet("Sally")
"Hello Sally!"
iex> Greetings.greet("Bob", :EN)
"Hello Bob!"
iex> Greetings.greet("Bob", "EN")
"Hello Bob!"
iex> Greetings.greet("Jose", "ES")
"Hola Jose!"
iex> Greetings.greet(7, "ES")
"Hola Number 7!"
iex> Greetings.greet("")
"Hello Mystery Person!"
iex> Greetings.greet()
"Hello Nobody!"
"""
def greet(name, lang \\ :EN)
def greet(name, lang) when is_bitstring(name) or is_atom(name) do
hello = lang |> LanguageHelpers.salutation()
"#{hello} #{clean_name(name)}!"
end
# Task: when you get a number, change it to "Number <arg>" and recurse into greet()
  def greet(name, lang) when is_integer(name) do
    greet("Number #{name}", lang)
  end
# Task: when you get a list of names, join them with a comma
  def greet(names, lang) when is_list(names) do
    names
    |> Enum.join(", ")
    |> greet(lang)
  end
def greet(), do: "Hello Nobody!"
@doc """
An example of some quick string cleanup
Trims whitespace and line breaks
Cleans strange character encodings
Forces name to capitalize all words
## Examples
iex> Greetings.clean_name(" jes ")
"Jes"
iex> Greetings.clean_name("")
"Mystery Person"
iex> Greetings.clean_name(nil)
"Nobody"
iex> Greetings.clean_name(:atom)
"atom"
"""
# Task: when you get "", return "Mystery Person"
# Task: when you get nil, return "Nobody"
# Task: when you get a name which is an atom, transform it into a string
  # Hint: use more function definitions, do not edit the below definition
  def clean_name(nil), do: "Nobody"
  def clean_name(""), do: "Mystery Person"
  def clean_name(name) when is_atom(name), do: Atom.to_string(name)
def clean_name(name) when is_bitstring(name) do
name
|> String.trim
|> String.normalize(:nfc)
|> String.split(" ")
|> Enum.map(&String.capitalize/1)
|> Enum.join(" ")
end
end
|
03_greetings/lib/greetings.ex
| 0.708414
| 0.46035
|
greetings.ex
|
starcoder
|
defmodule Commanded.EventStore.Adapter do
@moduledoc """
Defines the behaviour to be implemented by an event store adapter to be used by Commanded.
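
A minimal skeleton of an implementation (the module name and return values
are illustrative only, not a real adapter):

    defmodule MyApp.NullEventStore do
      @behaviour Commanded.EventStore.Adapter

      @impl true
      def child_spec(_application, _config), do: {:ok, [], %{}}

      @impl true
      def append_to_stream(_adapter_meta, _stream_uuid, _expected_version, _events), do: :ok

      # ... the remaining callbacks follow the same pattern
    end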
"""
alias Commanded.EventStore.{EventData, RecordedEvent, SnapshotData}
@type adapter_meta :: map
@type application :: Commanded.Application.t()
@type config :: Keyword.t()
@type stream_uuid :: String.t()
@type start_from :: :origin | :current | integer
@type expected_version :: :any_version | :no_stream | :stream_exists | non_neg_integer
@type subscription_name :: String.t()
@type subscription :: any
@type subscriber :: pid
@type source_uuid :: String.t()
@type error :: term
@doc """
Return a child spec defining all processes required by the event store.
"""
@callback child_spec(application, config) ::
{:ok, [:supervisor.child_spec() | {module, term} | module], adapter_meta}
@doc """
Append one or more events to a stream atomically.
"""
@callback append_to_stream(
adapter_meta,
stream_uuid,
expected_version,
events :: list(EventData.t())
) ::
:ok
| {:error, :wrong_expected_version}
| {:error, error}
@doc """
Streams events from the given stream, in the order in which they were
originally written.
"""
@callback stream_forward(
adapter_meta,
stream_uuid,
start_version :: non_neg_integer,
read_batch_size :: non_neg_integer
) ::
Enumerable.t()
| {:error, :stream_not_found}
| {:error, error}
@doc """
Create a transient subscription to a single event stream.
The event store will publish any events appended to the given stream to the
`subscriber` process as an `{:events, events}` message.
The subscriber does not need to acknowledge receipt of the events.
"""
@callback subscribe(adapter_meta, stream_uuid | :all) ::
:ok | {:error, error}
@doc """
Create a persistent subscription to an event stream.
"""
@callback subscribe_to(
adapter_meta,
stream_uuid | :all,
subscription_name,
subscriber,
start_from
) ::
{:ok, subscription}
| {:error, :subscription_already_exists}
| {:error, error}
@doc """
Acknowledge receipt and successful processing of the given event received from
a subscription to an event stream.
"""
@callback ack_event(adapter_meta, pid, RecordedEvent.t()) :: :ok
@doc """
Unsubscribe an existing subscriber from event notifications.
This should not delete the subscription.
"""
@callback unsubscribe(adapter_meta, subscription) :: :ok
@doc """
Delete an existing subscription.
"""
@callback delete_subscription(
adapter_meta,
stream_uuid | :all,
subscription_name
) ::
:ok | {:error, :subscription_not_found} | {:error, error}
@doc """
Read a snapshot, if available, for a given source.
"""
@callback read_snapshot(adapter_meta, source_uuid) ::
{:ok, SnapshotData.t()} | {:error, :snapshot_not_found}
@doc """
Record a snapshot of the data and metadata for a given source.
"""
@callback record_snapshot(adapter_meta, SnapshotData.t()) ::
:ok | {:error, error}
@doc """
Delete a previously recorded snapshot for a given source.
"""
@callback delete_snapshot(adapter_meta, source_uuid) ::
:ok | {:error, error}
end
|
lib/commanded/event_store/adapter.ex
| 0.86592
| 0.415936
|
adapter.ex
|
starcoder
|
defmodule VistaClient.Transformations.Serializer do
@moduledoc ~S"""
Convert VistaClient's tuple-based internal data structures of
- "films in a cinema on a day with their sessions" and
- "films in a cinema in a week with their sessions by day"
into Jason-digestible maps.
## Examples
iex> alias VistaClient.{Film,Session,Transformations}
iex> day_result = [{film, [session]}] = [{%Film{}, [%Session{}]}]
iex> Transformations.Serializer.from_day_result(day_result)
[
%{
"film" => %VistaClient.Film{id: nil, name: nil, rating: nil},
"sessions" => [
%VistaClient.Session{
attributes: nil, cinema: nil, cinema_id: nil, date: nil, film: nil, film_id_string: nil, id_string: nil, showtime: nil, version: nil
}
]
}
]
iex> day = ~D[2019-01-01]
iex> week_result = [{film, [{day, [session]}]}]
iex> Transformations.Serializer.from_week_result(week_result)
[
%{
"days" => %{
"2019-01-01" => [
%VistaClient.Session{attributes: nil, cinema: nil, cinema_id: nil, date: nil, film: nil, film_id_string: nil, id_string: nil, showtime: nil, version: nil}
]
},
"film" => %VistaClient.Film{id: nil, name: nil, rating: nil}
}
]
"""
def from_week_result(films), do: film_screening_tuples(films)
def from_day_result(films), do: film_sessions_tuples(films)
defp film_screening_tuples(films) do
Enum.map(
films,
fn {film, dst = _day_session_tuples} ->
%{"film" => film, "days" => day_session_tuples(dst)}
end
)
end
defp day_session_tuples(day_session_tuples) do
Enum.reduce(
day_session_tuples,
%{},
fn {day, sessions}, map ->
iso = Date.to_iso8601(day)
Map.put(map, iso, sessions)
end
)
end
defp film_sessions_tuples(film_tuples) do
Enum.map(
film_tuples,
fn {film, sessions} -> %{"film" => film, "sessions" => sessions} end
)
end
end
|
lib/transformations/serializer.ex
| 0.523177
| 0.425844
|
serializer.ex
|
starcoder
|
defmodule Semaphore do
alias :ets, as: ETS
@table :semaphore
@call_safe_table :semaphore_call_safe
## Application Callbacks
use GenServer
def start(_type, _args) do
import Supervisor.Spec, warn: false
Supervisor.start_link([worker(__MODULE__, [])], strategy: :one_for_one)
end
def start_link() do
sweep_interval = Application.get_env(:semaphore, :sweep_interval, 5_000)
GenServer.start_link(__MODULE__, sweep_interval, name: __MODULE__)
end
## Client API
@doc """
Acquire a semaphore, incrementing the internal count by one.
"""
@spec acquire(term, integer) :: boolean
def acquire(name, max) do
case ETS.update_counter(@table, name, [{2, 0}, {2, 1, max, max}], {name, 0}) do
[^max, _] -> false
_ -> true
end
end
@doc """
Release a semaphore, decrementing the internal count by one.
"""
@spec release(term) :: :ok
def release(name) do
ETS.update_counter(@table, name, {2, -1, 0, 0})
:ok
end
@doc """
Number of acquired semaphores.
"""
@spec count(term) :: integer
def count(name) do
case ETS.lookup(@table, name) do
[{_, count}] -> count
_ -> 0
end
end
@doc """
Reset a semaphore to a specific count.
"""
@spec reset(term, integer) :: :ok
def reset(name, count \\ 0) do
ETS.update_element(@table, name, {2, count})
:ok
end
@doc """
Attempt to acquire a semaphore and call a function and then automatically release.
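
A sketch of usage, limiting a hypothetical `do_work/0` to at most 10
concurrent callers:

    case Semaphore.call(:db_pool, 10, fn -> do_work() end) do
      {:error, :max} -> :over_capacity
      result -> result
    end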
"""
@spec call(term, integer, function) :: term | {:error, :max}
def call(_name, -1, func), do: func.()
def call(_name, 0, _func), do: {:error, :max}
def call(name, max, func) do
if acquire(name, max) do
try do
func.()
after
release(name)
end
else
{:error, :max}
end
end
@doc """
Attempt to acquire a semaphore and call a function that might link to another process, and then automatically release.
If the current process dies in a way that cannot be caught by the try block (e.g. a linked
process dies while `func` is being called), the semaphore will be automatically released by
the sweeper in the background.
This function has higher overhead than `call/3` and should only be used if you know that you might be linking to
something in the func.
"""
@spec call_linksafe(term, integer, function) :: term | {:error, :max}
def call_linksafe(_name, -1, func), do: func.()
def call_linksafe(_name, 0, _func), do: {:error, :max}
def call_linksafe(name, max, func) do
if acquire(name, max) do
safe_key = {name, self()}
inserted = ETS.insert_new(@call_safe_table, [safe_key])
try do
func.()
after
if inserted do
ETS.delete(@call_safe_table, safe_key)
end
release(name)
end
else
{:error, :max}
end
end
## Private
def init(sweep_interval) do
ETS.new(@table, [:set, :public, :named_table, {:write_concurrency, true}])
ETS.new(@call_safe_table, [:set, :public, :named_table, {:write_concurrency, true}])
{:ok, sweep_interval, sweep_interval}
end
def handle_info(:timeout, sweep_interval) do
do_sweep()
{:noreply, sweep_interval, sweep_interval}
end
defp do_sweep() do
ETS.foldl(
fn ({name, pid} = key, :ok) ->
with false <- Process.alive?(pid),
1 <- :ets.select_delete(@call_safe_table, [{key, [], [true]}]) do
release(name)
else
_ -> :ok
end
end,
:ok,
@call_safe_table
)
end
end
|
lib/semaphore.ex
| 0.621426
| 0.411466
|
semaphore.ex
|
starcoder
|
defmodule ExWire.Packet.Protocol.Hello do
@moduledoc """
This packet establishes capabilities and etc between two peer to peer
clients. This is generally required to be the first signed packet communicated
after the handshake is complete.
```
**Hello** `0x00` [`p2pVersion`: `P`, `clientId`: `B`, [[`cap1`: `B_3`, `capVersion1`: `P`], [`cap2`: `B_3`, `capVersion2`: `P`], ...], `listenPort`: `P`, `nodeId`: `B_64`]
First packet sent over the connection, and sent once by both sides. No other messages
may be sent until a `Hello` is received.
* `p2pVersion` Specifies the implemented version of the P2P protocol. Now must be 1.
* `clientId` Specifies the client software identity, as a human-readable string (e.g. "Ethereum(++)/1.0.0").
* `cap` Specifies a peer capability name as a length-3 ASCII string. Current supported capabilities are eth, shh.
* `capVersion` Specifies a peer capability version as a positive integer.
* `listenPort` specifies the port that the client is listening on (on the interface that the present connection traverses). If 0 it indicates the client is not listening.
* `nodeId` is the Unique Identity of the node and specifies a 512-bit hash that identifies this node.
```
"""
require Logger
alias ExWire.Packet.Capability
alias ExWire.Packet.Capability.Mana
@behaviour ExWire.Packet
@type t :: %__MODULE__{
p2p_version: non_neg_integer(),
client_id: String.t(),
caps: [Capability.t()],
listen_port: integer(),
node_id: ExWire.node_id()
}
defstruct [
:p2p_version,
:client_id,
:caps,
:listen_port,
:node_id
]
@impl true
@spec message_id_offset() :: 0
def message_id_offset() do
0x00
end
@doc """
Given a Hello packet, serializes for transport over Eth Wire Protocol.
## Examples
iex> %ExWire.Packet.Protocol.Hello{p2p_version: 10, client_id: "Mana/Test", caps: [ExWire.Packet.Capability.new({"eth", 1}), ExWire.Packet.Capability.new({"par", 2})], listen_port: 5555, node_id: <<5>>}
...> |> ExWire.Packet.Protocol.Hello.serialize
[10, "Mana/Test", [["eth", 1], ["par", 2]], 5555, <<5>>]
"""
@impl true
@spec serialize(t) :: ExRLP.t()
def serialize(packet = %__MODULE__{}) do
[
packet.p2p_version,
packet.client_id,
for(%Capability{name: cap, version: ver} <- packet.caps, do: [cap, ver]),
packet.listen_port,
packet.node_id
]
end
@doc """
Given an RLP-encoded Hello packet from Eth Wire Protocol,
decodes into a Hello struct.
## Examples
iex> ExWire.Packet.Protocol.Hello.deserialize([<<10>>, "Mana/Test", [["eth", <<1>>], ["par", <<2>>]], <<55>>, <<5>>])
%ExWire.Packet.Protocol.Hello{p2p_version: 10, client_id: "Mana/Test", caps: [ExWire.Packet.Capability.new({"eth", 1}), ExWire.Packet.Capability.new({"par", 2})], listen_port: 55, node_id: <<5>>}
"""
@impl true
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
[
p2p_version,
client_id,
caps,
listen_port,
node_id
] = rlp
%__MODULE__{
p2p_version: p2p_version |> :binary.decode_unsigned(),
client_id: client_id,
caps: for([cap, ver] <- caps, do: Capability.new({cap, ver |> :binary.decode_unsigned()})),
listen_port: listen_port |> :binary.decode_unsigned(),
node_id: node_id
}
end
@doc """
Handles a Hello message. We can mark a peer as active for communication
after we receive this message.
## Examples
# Matching caps
iex> caps = [ExWire.Packet.Capability.new({"eth", 62}), ExWire.Packet.Capability.new({"par", 2})]
iex> %ExWire.Packet.Protocol.Hello{p2p_version: 10, client_id: "Mana/Test", caps: caps, listen_port: 5555, node_id: <<5>>}
...> |> ExWire.Packet.Protocol.Hello.handle()
{:activate, [ExWire.Packet.Capability.new({"eth", 62}), ExWire.Packet.Capability.new({"par", 2})], 10}
# No matching caps
iex> caps = [ExWire.Packet.Capability.new({"eth", 1}), ExWire.Packet.Capability.new({"par", 2})]
iex> %ExWire.Packet.Protocol.Hello{p2p_version: 10, client_id: "Mana/Test", caps: caps, listen_port: 5555, node_id: <<5>>}
...> |> ExWire.Packet.Protocol.Hello.handle()
{:disconnect, :useless_peer, [ExWire.Packet.Capability.new({"eth", 1}), ExWire.Packet.Capability.new({"par", 2})], 10}
# When no caps
iex> %ExWire.Packet.Protocol.Hello{p2p_version: 10, client_id: "Mana/Test", caps: [], listen_port: 5555, node_id: <<5>>}
...> |> ExWire.Packet.Protocol.Hello.handle()
{:disconnect, :useless_peer, [], 10}
"""
@impl true
@spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
def handle(packet = %__MODULE__{}) do
Exth.trace(fn -> "[Packet] Got Hello: #{inspect(packet)}" end)
if Capability.get_matching_capabilities(packet.caps, Mana.get_our_capabilities_map()) == [] do
_ =
Logger.debug(fn ->
"[Packet] Disconnecting due to no matching peer caps (#{inspect(packet.caps)})"
end)
{:disconnect, :useless_peer, packet.caps, packet.p2p_version}
else
# TODO: Add a bunch more checks
{:activate, packet.caps, packet.p2p_version}
end
end
end
|
apps/ex_wire/lib/ex_wire/packet/protocol/hello.ex
| 0.845321
| 0.842539
|
hello.ex
|
starcoder
|
defmodule Chess.Moves.Piece do
@moduledoc false
alias Chess.Board
alias Chess.Moves.Generator
alias Chess.Moves.Pieces.Knight
alias Chess.Moves.Pieces.Pawn
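  @doc """
  Returns `true` if the piece on the given `{file, rank}` square is attacked
  by a rook, queen, bishop, knight or pawn.

  A sketch of usage, assuming `board` is a board map keyed by `"file,rank"`
  strings as built by `Chess.Board`:

      Chess.Moves.Piece.attacked?(board, {4, 0})
  """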
def attacked?(board, {file, rank}) do
attacked_by_rook_or_queen?(board, {file, rank}) ||
attacked_by_bishop_or_queen?(board, {file, rank}) ||
attacked_by_knight?(board, {file, rank}) ||
attacked_by_pawn?(board, {file, rank})
end
defp attacked_by_rook_or_queen?(board, {file, rank}) do
_attacked?(board, {file, rank}, {0, 1}, ["rook", "queen"]) ||
_attacked?(board, {file, rank}, {0, -1}, ["rook", "queen"]) ||
_attacked?(board, {file, rank}, {1, 0}, ["rook", "queen"]) ||
_attacked?(board, {file, rank}, {-1, 0}, ["rook", "queen"])
end
defp attacked_by_bishop_or_queen?(board, {file, rank}) do
_attacked?(board, {file, rank}, {1, 1}, ["bishop", "queen"]) ||
_attacked?(board, {file, rank}, {1, -1}, ["bishop", "queen"]) ||
_attacked?(board, {file, rank}, {-1, 1}, ["bishop", "queen"]) ||
_attacked?(board, {file, rank}, {-1, -1}, ["bishop", "queen"])
end
defp attacked_by_knight?(board, {file, rank}) do
_attacked?(board, {file, rank}, Knight.pattern, "knight")
end
defp attacked_by_pawn?(board, {file, rank}) do
colour =
board
|> Board.piece({file, rank})
|> Map.get("colour")
board
|> _attacked_by_pawn?({file, rank}, Pawn.pattern(colour))
end
defp _attacked_by_pawn?(board, {file, rank}, pattern) do
moves =
board
|> Generator.moves({file, rank}, pattern)
Enum.any?(moves, &(match_piece(board, &1, "pawn")))
end
defp _attacked?(_board, {0, _rank}, {-1, _}, _), do: false
defp _attacked?(_board, {_file, 0}, {_, -1}, _), do: false
defp _attacked?(_board, {7, _rank}, {1, _}, _), do: false
defp _attacked?(_board, {_file, 7}, {_, 1}, _), do: false
defp _attacked?(board, {file, rank}, {fv, rv}, pieces) do
board
|> Generator.moves({file, rank}, {fv, rv})
|> List.last
|> case do
{file, rank} ->
piece = board["#{file},#{rank}"]
        piece["type"] in pieces
nil ->
false
end
end
defp _attacked?(board, {file, rank}, pattern, piece_type) do
moves =
board
|> Generator.moves({file, rank}, pattern)
Enum.any?(moves, &(match_piece(board, &1, piece_type)))
end
defp match_piece(board, {file, rank}, piece_type) do
piece =
board
|> Board.piece({file, rank})
piece["type"] == piece_type
end
end
|
lib/chess/moves/piece.ex
| 0.666822
| 0.514522
|
piece.ex
|
starcoder
|
defmodule Dicer.Lexer do
alias Dicer.Tokens
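  @doc """
  Tokenizes a dice-expression string into `{:ok, tokens}`, where `tokens` is a
  list of `Dicer.Tokens` structs terminated by a `%Tokens.End{}`. An `{:error, _}`
  input is passed through unchanged.

  A sketch of usage (the exact tokens produced depend on the regexes defined in
  `Dicer.Tokens`):

      {:ok, tokens} = Dicer.Lexer.tokenize({:ok, "2d6+1"})
  """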
def tokenize({:ok, input}) when is_binary(input) do
_tokenize(input)
end
def tokenize(input = {:error, _}) do
input
end
defp _tokenize(input, result \\ [])
defp _tokenize("", result) do
{:ok, Enum.reverse([%Tokens.End{}] ++ result)}
end
defp _tokenize(input, result) do
case _process_next_token(input) do
{:error, message} -> {:error, message}
{token, remaining_input} -> _tokenize(remaining_input, [token | result])
end
end
defp _process_next_token(input) do
cond do
Regex.match?(Tokens.Plus.get_regex, input) ->
{%Tokens.Plus{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.Minus.get_regex, input) ->
{%Tokens.Minus{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.Multiply.get_regex, input) ->
{%Tokens.Multiply{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.Divide.get_regex, input) ->
{%Tokens.Divide{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.LeftParenthesis.get_regex, input) ->
{%Tokens.LeftParenthesis{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.RightParenthesis.get_regex, input) ->
{%Tokens.RightParenthesis{}, String.slice(input, 1..-1)}
Regex.match?(Tokens.TakeTop.get_regex, input) ->
_process_and_create_take_top_token(input)
Regex.match?(Tokens.TakeBottom.get_regex, input) ->
_process_and_create_take_bottom_token(input)
Regex.match?(Tokens.Dice.get_regex, input) ->
_process_and_create_dice_token(input)
Regex.match?(Tokens.FudgeDice.get_regex, input) ->
_process_and_create_fudge_dice_token(input)
Regex.match?(Tokens.Num.get_regex, input) ->
[num_str | _tail] = Regex.run(Tokens.Num.get_regex, input)
{%Tokens.Num{value: num_str}, String.slice(input, String.length(num_str)..-1)}
String.length(input) == 0 -> {%Tokens.End{}, ""}
true -> {:error, ["Input has unrecognized characters!"]}
end
end
defp _process_and_create_take_top_token(input) do
[top_str, quantity] = Regex.run(Tokens.TakeTop.get_regex, input)
{q, _} = Integer.parse quantity
{%Tokens.TakeTop{take_num: q }, String.slice(input, String.length(top_str)..-1)}
end
defp _process_and_create_take_bottom_token(input) do
[bottom_str, quantity] = Regex.run(Tokens.TakeBottom.get_regex, input)
{q, _} = Integer.parse quantity
{%Tokens.TakeBottom{take_num: q }, String.slice(input, String.length(bottom_str)..-1)}
end
defp _process_and_create_dice_token(input) do
[dice_str, quantity, sides] = Regex.run(Tokens.Dice.get_regex, input)
{s, _} = Integer.parse sides
case quantity do
"" ->
{%Tokens.Dice{quantity: 1, sides: s }, String.slice(input, String.length(dice_str)..-1)}
_ ->
{result, _} = Integer.parse quantity
{%Tokens.Dice{quantity: result, sides: s }, String.slice(input, String.length(dice_str)..-1)}
end
end
defp _process_and_create_fudge_dice_token(input) do
[dice_str, quantity] = Regex.run(Tokens.FudgeDice.get_regex, input)
case quantity do
"" ->
{%Tokens.FudgeDice{quantity: 1}, String.slice(input, String.length(dice_str)..-1)}
_ ->
{result, _} = Integer.parse quantity
{%Tokens.FudgeDice{quantity: result}, String.slice(input, String.length(dice_str)..-1)}
end
end
end
|
lib/dicer/lexer.ex
| 0.564098
| 0.489564
|
lexer.ex
|
starcoder
|
defmodule Makeup.Styles.HTML.StyleMap do
@moduledoc """
This module contains all styles, and facilities to map style names (binaries or atoms) to styles.
Style names are of the form `<name>_style`.
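
For example, to fetch a style map at runtime:

    alias Makeup.Styles.HTML.StyleMap

    StyleMap.monokai_style()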
"""
alias Makeup.Styles.HTML
# %% Start Pygments %%
@doc """
The *abap* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#abap).
"""
def abap_style, do: HTML.AbapStyle.style()
@doc """
The *algol* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#algol).
"""
def algol_style, do: HTML.AlgolStyle.style()
@doc """
The *algol_nu* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#algol_nu).
"""
def algol_nu_style, do: HTML.Algol_NuStyle.style()
@doc """
The *arduino* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#arduino).
"""
def arduino_style, do: HTML.ArduinoStyle.style()
@doc """
The *autumn* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#autumn).
"""
def autumn_style, do: HTML.AutumnStyle.style()
@doc """
The *borland* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#borland).
"""
def borland_style, do: HTML.BorlandStyle.style()
@doc """
The *bw* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#bw).
"""
def bw_style, do: HTML.BlackWhiteStyle.style()
@doc """
The *colorful* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#colorful).
"""
def colorful_style, do: HTML.ColorfulStyle.style()
@doc """
The *default* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#default).
"""
def default_style, do: HTML.DefaultStyle.style()
@doc """
The *emacs* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#emacs).
"""
def emacs_style, do: HTML.EmacsStyle.style()
@doc """
The *friendly* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#friendly).
"""
def friendly_style, do: HTML.FriendlyStyle.style()
@doc """
The *fruity* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#fruity).
"""
def fruity_style, do: HTML.FruityStyle.style()
@doc """
The *igor* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#igor).
"""
def igor_style, do: HTML.IgorStyle.style()
@doc """
The *lovelace* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#lovelace).
"""
def lovelace_style, do: HTML.LovelaceStyle.style()
@doc """
The *manni* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#manni).
"""
def manni_style, do: HTML.ManniStyle.style()
@doc """
The *monokai* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#monokai).
"""
def monokai_style, do: HTML.MonokaiStyle.style()
@doc """
The *murphy* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#murphy).
"""
def murphy_style, do: HTML.MurphyStyle.style()
@doc """
The *native* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#native).
"""
def native_style, do: HTML.NativeStyle.style()
@doc """
The *paraiso_dark* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#paraiso_dark).
"""
def paraiso_dark_style, do: HTML.ParaisoDarkStyle.style()
@doc """
The *paraiso_light* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#paraiso_light).
"""
def paraiso_light_style, do: HTML.ParaisoLightStyle.style()
@doc """
The *pastie* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#pastie).
"""
def pastie_style, do: HTML.PastieStyle.style()
@doc """
The *perldoc* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#perldoc).
"""
def perldoc_style, do: HTML.PerldocStyle.style()
@doc """
The *rainbow_dash* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#rainbow_dash).
"""
def rainbow_dash_style, do: HTML.RainbowDashStyle.style()
@doc """
The *rrt* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#rrt).
"""
def rrt_style, do: HTML.RrtStyle.style()
@doc """
The *tango* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#tango).
"""
def tango_style, do: HTML.TangoStyle.style()
@doc """
The *trac* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#trac).
"""
def trac_style, do: HTML.TracStyle.style()
@doc """
The *vim* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#vim).
"""
def vim_style, do: HTML.VimStyle.style()
@doc """
The *vs* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#vs).
"""
def vs_style, do: HTML.VisualStudioStyle.style()
@doc """
The *xcode* style. Example [here](https://tmbb.github.io/makeup_demo/elixir.html#xcode).
"""
def xcode_style, do: HTML.XcodeStyle.style()
# %% End Pygments %%
# Custom themes:
@doc """
The *samba* style, based on the tango style, but with visual distinction between
classes and variables, and lighter punctuation.
"""
def samba_style, do: HTML.SambaStyle.style()
end
|
deps/makeup/lib/makeup/styles/html/style_map.ex
| 0.731538
| 0.565839
|
style_map.ex
|
starcoder
|
defmodule Alchemy.Guild do
alias Alchemy.{Channel, User, Voice, VoiceState}
alias Alchemy.Guild.{Emoji, GuildMember, Integration, Presence, Role}
import Alchemy.Structs
@moduledoc """
Guilds represent a collection of users in a "server". This module contains
information about the types, and subtypes related to guilds, as well
as some useful functions related to them.
"""
@type snowflake :: String.t
@typedoc """
An iso_8601 timestamp.
"""
@type timestamp :: String.t
@typedoc """
Represents a guild.
- `id`
The id of this guild.
- `name`
The name of this guild.
- `icon`
The image hash of the icon image.
- `splash`
The image hash of the splash image. Not a lot of guilds have a hash.
- `owner_id`
The user id of the guild's owner.
- `region`
The region of the guild.
- `afk_channel_id`
The id of the afk channel, if the guild has one.
- `afk_timeout`
The afk timeout in seconds.
- `embed_enabled`
Whether this guild is embeddable.
- `verification_level`
The level of verification this guild requires.
- `default_message_notifications`
The default message notifications level.
- `roles`
A list of the roles in this server.
- `emojis`
A list of custom emojis in this server.
- `features`
A list of guild features.
- `mfa_level`
The required mfa level for the guild.
The following fields will be missing for guilds accessed from outside the Cache:
- `joined_at`
The timestamp of guild creation.
- `large`
Whether or not this guild is considered "large".
- `unavailable`
This should never be true for guilds.
- `member_count`
The number of members a guild contains.
- `voice_states`
A list of voice states of the guild.
- `members`
A list of members in the guild.
- `channels`
A list of channels in the guild.
- `presences`
A list of presences in the guild.
"""
@type t :: %__MODULE__{
id: snowflake,
name: String.t,
icon: String.t,
splash: String.t | nil,
owner_id: snowflake,
region: String.t,
afk_channel_id: String.t | nil,
afk_timeout: Integer,
embed_enabled: Boolean,
verification_level: Integer,
default_message_notifications: Integer,
roles: [Guild.role],
emojis: [emoji],
features: [String.t],
mfa_level: Integer,
joined_at: timestamp,
large: Boolean,
unavailable: Boolean,
member_count: Integer,
voice_states: [Voice.state],
members: [member],
channels: [Channel.t],
presences: [Presence.t]
}
defstruct [:id,
:name,
:icon,
:splash,
:owner_id,
:region,
:afk_channel_id,
:afk_timeout,
:embed_enabled,
:verification_level,
:default_message_notifications,
:roles,
:emojis,
:features,
:mfa_level,
:joined_at,
:large,
:unavailable,
:member_count,
:voice_states,
:members,
:channels,
:presences
]
@typedoc """
Represents a member in a guild.
- `user`
A user struct containing information about the underlying user.
- `nick`
An optional nickname for this member.
- `roles`
A list of ids corresponding to roles the member has.
- `joined_at`
The timestamp of when this member joined the guild.
- `deaf`
Whether the user is currently deafened.
- `mute`
Whether the user is currently muted.
"""
@type member :: %GuildMember{
user: User.t,
nick: String.t | nil,
roles: [snowflake],
joined_at: timestamp,
deaf: Boolean,
mute: Boolean
}
@typedoc """
Represents a custom emoji in a guild.
The string representation of this struct will be the markdown
necessary to use it. i.e. `Cogs.say("\#{emoji}")` will send the emoji.
- `id`
The id of this emoji.
- `name`
The name of this emoji.
- `roles`
A list of role ids who can use this role.
- `require_colons`
Whether or not this emoji must be wrapped in colons.
- `managed`
Whether or not this emoji is managed.
"""
@type emoji :: %Emoji{
id: String.t,
name: String.t,
roles: [String.t],
require_colons: Boolean,
managed: Boolean
}
@typedoc """
Represents the account of an integration.
- `id`
The id of the account.
- `name`
The name of the account.
"""
@type integration_account :: %Integration.Account{
id: snowflake,
name: String.t
}
@typedoc """
Represents an guild's integration with a service, (i.e. twitch)
- `id`
The id of the integration.
- `name`
The name of the integration.
- `type`
Integration type; youtube, twitch, etc.
- `enabled`
Whether or not the integration is enabled.
- `syncing`
Whether or not the integration is syncing.
- `role_id`
The id of the role associated with "subscribers" to this integration.
- `expire_behaviour`
The behaviour of expiring subscribers.
- `expire_grace_period`
The grace period before expiring subscribers.
- `user`
The user for this integration.
- `account`
The integration's account information.
- `synced_at`
When this integration was last synced.
"""
@type integration :: %Integration{
id: snowflake,
name: String.t,
type: String.t,
enabled: Boolean,
syncing: Boolean,
role_id: snowflake,
expire_behaviour: Integer,
expire_grace_period: Integer,
user: User.t,
account: integration_account,
synced_at: timestamp
}
@typedoc """
Represents a role in a guild.
- `id`
The id of the role.
- `name`
The name of the role.
- `color`
The color of the role.
- `hoist`
Whether the role is "hoisted" above others in the sidebar.
- `position`
The position of the role in a guild.
- `permissions`
The bitset of permissions for this role. See the `Permissions` module
for more information.
- `managed`
Whether this role is managed by an integration.
- `mentionable`
Whether this role is mentionable.
"""
@type role :: %Role{
id: snowflake,
name: String.t,
color: Integer,
hoist: Boolean,
position: Integer,
permissions: Integer,
managed: Boolean,
mentionable: Boolean,
}
@typedoc """
Represents the presence of a user in a guild.
- `user`
The user this presence is for.
- `roles`
A list of role ids this user belongs to.
- `game`
The current activity of the user, or `nil`.
- `guild_id`
The id of the guild this presences is in.
- `status`
"idle", "online", or "offline"
"""
@type presence :: %Presence{
user: User.t,
roles: [snowflake],
game: String.t | nil,
guild_id: snowflake,
status: String.t
}
@doc """
Finds the highest ranked role of a member in a guild.
This is useful, because the permissions and color
of the highest role are the ones that apply to that member.
"""
@spec highest_role(t, member) :: role
def highest_role(guild, member) do
guild.roles
    |> Enum.sort_by(& &1.position, &>=/2)
    # highest position first; compare role ids against the member's role id list
    |> Enum.find(&(&1.id in member.roles))
end
defmacrop is_valid_guild_icon_url(type, size) do
quote do
unquote(type) in ["jpg", "jpeg", "png", "webp"] and
unquote(size) in [128, 256, 512, 1024, 2048]
end
end
@doc """
Get the icon image URL for the given guild.
If the guild does not have any icon, returns `nil`.
## Parameters
- `type`: The returned image format. Can be any of `jpg`, `jpeg`, `png`, or `webp`.
- `size`: The desired size of the returned image. Must be one of 128, 256, 512, 1024, or 2048.
If the parameters do not match these conditions, an `ArgumentError` is raised.
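
A sketch of usage, assuming `guild` is an `Alchemy.Guild.t` whose `icon`
hash is set:

    Alchemy.Guild.icon_url(guild)
    Alchemy.Guild.icon_url(guild, "webp", 512)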
"""
@spec icon_url(__MODULE__.t, String.t, 16..2048) :: String.t
def icon_url(guild, type \\ "png", size \\ 256) when is_valid_guild_icon_url(type, size) do
case guild.icon do
nil -> nil
hash -> "https://cdn.discordapp.com/icons/#{guild.id}/#{hash}.#{type}?size=#{size}"
end
end
def icon_url(_guild, _type, _size) do
raise ArgumentError, message: "invalid icon URL type and / or size"
end
@doc false
def from_map(map) do
map
|> field_map("roles", &(map_struct &1, Role))
|> field_map("emojis", &(map_struct &1, Emoji))
|> field_map?("voice_states", &(map_struct &1, VoiceState))
|> fields_from_map?("members", GuildMember)
|> fields_from_map?("channels", Channel)
|> fields_from_map?("presences", Presence)
|> to_struct(__MODULE__)
end
end
|
lib/Structs/Guild/guild.ex
| 0.864053
| 0.474327
|
guild.ex
|
starcoder
|
defmodule Plug.Builder do
alias Plug.Conn
@moduledoc """
Conveniences for building plugs.
This module can be used into a module in order to build
a plug pipeline:
defmodule MyApp do
use Plug.Builder
plug Plug.Logger
plug :hello, upper: true
def hello(conn, opts) do
body = if opts[:upper], do: "WORLD", else: "world"
send_resp(conn, 200, body)
end
end
Multiple plugs can be defined with the `plug/2` macro, forming a
pipeline. `Plug.Builder` also imports the `Plug.Conn` module, making
functions like `send_resp/3` available.
## Plug behaviour
Internally, `Plug.Builder` implements the `Plug` behaviour, which means
both `init/1` and `call/2` functions are defined. By implementing the
Plug API, `Plug.Builder` guarantees this module can be handed to a web
server or used as part of another pipeline.
## Halting a Plug pipeline
A Plug pipeline can be halted with `Plug.Conn.halt/1`. The builder will
prevent further plugs downstream from being invoked and return the current
connection.
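
For example, a plug that halts unauthenticated requests (the
`authenticated?/1` check is hypothetical):

    def authenticate(conn, _opts) do
      if authenticated?(conn) do
        conn
      else
        conn |> send_resp(401, "unauthorized") |> halt()
      end
    end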
"""
@type plug :: module | atom
@doc false
defmacro __using__(_) do
quote do
@behaviour Plug
def init(opts) do
opts
end
def call(conn, opts) do
plug_builder_call(conn, opts)
end
defoverridable [init: 1, call: 2]
import Plug.Conn
import Plug.Builder, only: [plug: 1, plug: 2]
Module.register_attribute(__MODULE__, :plugs, accumulate: true)
@before_compile Plug.Builder
end
end
@doc false
defmacro __before_compile__(env) do
plugs = Module.get_attribute(env.module, :plugs)
if plugs == [] do
raise "no plugs have been defined in #{inspect(env.module)}"
end
{conn, body} = Plug.Builder.compile(plugs)
quote do
defp plug_builder_call(unquote(conn), _), do: unquote(body)
end
end
@doc """
A macro that stores a new plug.
"""
defmacro plug(plug, opts \\ []) do
quote do
@plugs {unquote(plug), unquote(opts), true}
end
end
@doc """
Compiles a plug pipeline.
It expects a reversed pipeline (with the last plug coming first)
and returns a tuple containing the reference to the connection
as first argument and the compiled quote pipeline.
"""
@spec compile([{plug, Plug.opts}]) :: {Macro.t, Macro.t}
def compile(pipeline) do
conn = quote do: conn
{conn, Enum.reduce(pipeline, conn, "e_plug(init_plug(&1), &2))}
end
defp init_plug({plug, opts, guard}) do
case Atom.to_char_list(plug) do
'Elixir.' ++ _ ->
init_module_plug(plug, opts, guard)
_ ->
init_fun_plug(plug, opts, guard)
end
end
defp init_module_plug(plug, opts, guard) do
opts = plug.init(opts)
if function_exported?(plug, :call, 2) do
{:call, plug, opts, guard}
else
raise ArgumentError, message: "#{inspect plug} plug must implement call/2"
end
end
defp init_fun_plug(plug, opts, guard) do
{:fun, plug, opts, guard}
end
defp quote_plug({:call, plug, opts, guard}, acc) do
call = quote do: unquote(plug).call(conn, unquote(Macro.escape(opts)))
quote do
case unquote(compile_guard(call, guard)) do
%Conn{halted: true} = conn -> conn
%Conn{} = conn -> unquote(acc)
_ -> raise "expected #{unquote(inspect plug)}.call/2 to return a Plug.Conn"
end
end
end
defp quote_plug({:fun, plug, opts, guard}, acc) do
call = quote do: unquote(plug)(conn, unquote(Macro.escape(opts)))
quote do
case unquote(compile_guard(call, guard)) do
%Conn{halted: true} = conn -> conn
%Conn{} = conn -> unquote(acc)
_ -> raise "expected #{unquote(plug)}/2 to return a Plug.Conn"
end
end
end
defp compile_guard(call, true) do
call
end
defp compile_guard(call, guard) do
quote do
case true do
true when unquote(guard) -> unquote(call)
true -> conn
end
end
end
end
|
lib/plug/builder.ex
| 0.835886
| 0.425456
|
builder.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.DocumentProvider do
@moduledoc ~s"""
Custom Absinthe DocumentProvider for more effective caching.
Absinthe phases have one main difference compared to plugs - all phases must run
and cannot be halted. But phases can be jumped over by returning
`{:jump, result, destination_phase}`
This module makes use of 2 new phases - a `CacheDocument` phase and `Idempotent`
phase.
If the value is present in the cache it is put in the blueprint and the execution
jumps to the Idempotent phase, effectively skipping the Absinthe's Resolution
and Result phases. Result is the last phase in the pipeline so the Idempotent
phase is inserted after it.
If the value is not present in the cache, the Absinthe's default Resolution and
Result phases are being executed and the new DocumentCache and Idempotent phases
are doing nothing.
In the end there's a `before_send` hook that adds the result into the cache.
"""
@behaviour Absinthe.Plug.DocumentProvider
alias SanbaseWeb.Graphql.Cache
@doc false
@impl true
def pipeline(%Absinthe.Plug.Request.Query{pipeline: pipeline}) do
pipeline
|> Absinthe.Pipeline.insert_before(
Absinthe.Phase.Document.Complexity.Analysis,
SanbaseWeb.Graphql.Phase.Document.Complexity.Preprocess
)
|> Absinthe.Pipeline.insert_before(
Absinthe.Phase.Document.Execution.Resolution,
SanbaseWeb.Graphql.Phase.Document.Execution.CacheDocument
)
|> Absinthe.Pipeline.insert_after(
Absinthe.Phase.Document.Result,
SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent
)
end
@doc false
@impl true
def process(%Absinthe.Plug.Request.Query{document: nil} = query, _), do: {:cont, query}
def process(%Absinthe.Plug.Request.Query{document: _} = query, _), do: {:halt, query}
end
defmodule SanbaseWeb.Graphql.Phase.Document.Execution.CacheDocument do
@moduledoc ~s"""
Custom phase for obtaining the result from cache.
In case the value is not present in the cache, the default Resolution and Result
phases are run. Otherwise this phase supplies the cached result and jumps over the
default Resolution and Result phases.
When calculating the cache key only some of the fields in the whole blueprint are
taken into account. They are defined in the module attribute `@cache_fields`.
The only values that are converted to something else while constructing
the cache key are:
- DateTime - It is rounded by the TTL so all datetimes in a range yield the same cache key
- Struct - All structs are converted to plain maps
"""
use Absinthe.Phase
alias SanbaseWeb.Graphql.Cache
@compile inline: [add_cache_key_to_blueprint: 2, queries_in_request: 1]
@cached_queries SanbaseWeb.Graphql.AbsintheBeforeSend.cached_queries()
@spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
def run(bp_root, _) do
queries_in_request = queries_in_request(bp_root)
case Enum.any?(queries_in_request, &(&1 in @cached_queries)) do
false ->
{:ok, bp_root}
true ->
context = bp_root.execution.context
# Add keys that can affect the data the user can have access to
additional_keys_hash =
{context.permissions, context.product_id, context.auth.subscription, context.auth.plan,
context.auth.auth_method}
|> Sanbase.Cache.hash()
cache_key =
SanbaseWeb.Graphql.Cache.cache_key(
{"bp_root", additional_keys_hash},
          sanitize_blueprint(bp_root),
ttl: 120,
max_ttl_offset: 120
)
bp_root = add_cache_key_to_blueprint(bp_root, cache_key)
case Cache.get(cache_key) do
nil ->
{:ok, bp_root}
result ->
# Storing it again `touch`es it and the TTL timer is restarted.
          # This could keep the same value cached indefinitely
Process.put(:do_not_cache_query, true)
{:jump, %{bp_root | result: result},
SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent}
end
end
end
# Private functions
defp queries_in_request(%{operations: operations}) do
operations
|> Enum.flat_map(fn %{selections: selections} ->
selections
|> Enum.map(fn %{name: name} -> Inflex.camelize(name, :lower) end)
end)
end
defp add_cache_key_to_blueprint(
%{execution: %{context: context} = execution} = blueprint,
cache_key
) do
%{
blueprint
| execution: %{execution | context: Map.put(context, :query_cache_key, cache_key)}
}
end
# Leave only the fields that are needed to generate the cache key
  # This lets us cache values that are interpolated into the query string itself
# The datetimes are rounded so all datetimes in a bucket generate the same
# cache key
  defp sanitize_blueprint(%DateTime{} = dt), do: dt
  defp sanitize_blueprint({:argument_data, _} = tuple), do: tuple
  defp sanitize_blueprint({a, b}), do: {a, sanitize_blueprint(b)}
@cache_fields [
:name,
:argument_data,
:selection_set,
:selections,
:fragments,
:operations,
:alias
]
  defp sanitize_blueprint(map) when is_map(map) do
    Map.take(map, @cache_fields)
    |> Enum.map(&sanitize_blueprint/1)
    |> Map.new()
  end

  defp sanitize_blueprint(list) when is_list(list) do
    Enum.map(list, &sanitize_blueprint/1)
  end

  defp sanitize_blueprint(data), do: data
end
defmodule SanbaseWeb.Graphql.Phase.Document.Execution.Idempotent do
@moduledoc ~s"""
A phase that does nothing and is inserted after the Absinthe's Result phase.
`CacheDocument` phase jumps to this `Idempotent` phase if it finds the needed
value in the cache so the Absinthe's Resolution and Result phases are skipped.
"""
use Absinthe.Phase
@spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
def run(bp_root, _), do: {:ok, bp_root}
end
defmodule SanbaseWeb.Graphql.Phase.Document.Complexity.Preprocess do
use Absinthe.Phase
@spec run(Absinthe.Blueprint.t(), Keyword.t()) :: Absinthe.Phase.result_t()
def run(bp_root, _) do
bp_root.operations
|> Enum.flat_map(fn %{selections: selections} ->
selections
|> Enum.flat_map(fn
%{name: name, argument_data: %{metric: metric}} = struct ->
case name |> Inflex.underscore() do
"get_metric" ->
selections =
Enum.map(struct.selections, fn
%{name: name} -> name |> Inflex.underscore()
_ -> nil
end)
|> Enum.reject(&is_nil/1)
# Put the metric name in the list 0, 1 or 2 times, depending
# on the selections. `timeseries_data` and `aggregated_timeseries_data`
# would go through the complexity code once, removing the metric
# name from the list both times - so it has to be there twice, while
# `timeseries_data_complexity` won't go through that path.
# `histogram_data` does not have complexity checks right now.
# This is equivalent to X -- (X -- Y) because the `--` operator
# has right to left associativity
common_parts =
selections -- selections -- ["timeseries_data", "aggregated_timeseries_data"]
Enum.map(common_parts, fn _ -> metric end)
_ ->
[]
end
_ ->
[]
end)
end)
|> case do
[_ | _] = metrics ->
Process.put(:__metric_name_from_get_metric_api__, metrics)
_ ->
:ok
end
{:ok, bp_root}
end
end
|
lib/sanbase_web/graphql/document/document_provider.ex
| 0.891256
| 0.510619
|
document_provider.ex
|
starcoder
|
defmodule Day04 do
# https://adventofcode.com/2018/day/4
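# Input lines look like (sample format from the puzzle statement):
#   [1518-11-01 00:00] Guard #10 begins shift
#   [1518-11-01 00:05] falls asleep
#   [1518-11-01 00:25] wakes up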
def hello() do
:hello
end
def parse_datetime(rec) do
t = rec |> String.trim() |> String.split("]") |> List.first() |> String.trim("[")
(t <> ":00+0000") |> DateTime.from_iso8601() |> elem(1)
end
def compare(rec1, rec2) do
DateTime.compare(parse_datetime(rec1), parse_datetime(rec2)) == :lt
end
def get_id(record) do
[[_, id]] = Regex.scan(~r{#(\d+)}, record)
String.to_integer(id)
end
def get_minute(record) do
[[_, min]] = Regex.scan(~r{:(\d\d)}, record)
String.to_integer(min)
end
def calculate_sleep_time([], _current, result) do
result
end
def calculate_sleep_time([head | tail], current, result) do
# Dispatch on the type of log line: guard shift start, falls asleep, or wakes up
cond do
String.contains?(head, "#") ->
calculate_sleep_time(
tail,
{get_id(head), nil, nil},
result
)
String.contains?(head, "falls asleep") ->
calculate_sleep_time(tail, {elem(current, 0), get_minute(head), nil}, result)
String.contains?(head, "wakes up") ->
calculate_sleep_time(
tail,
{elem(current, 0), nil, nil},
Map.merge(
result,
%{
elem(current, 0) =>
Map.new(elem(current, 1)..(get_minute(head) - 1), fn x -> {x, 1} end)
},
fn _k, v1, v2 ->
Map.merge(v1, v2, fn _sk, sv1, sv2 ->
sv1 + sv2
end)
end
)
)
end
end
def sleep_minutes(map) do
map |> Map.values() |> Enum.sum()
end
def solve(records) do
data = records |> sort_chronological |> calculate_sleep_time({}, Map.new())
{topid, _} =
data
|> Map.to_list()
|> Enum.map(fn {k, v} -> {k, sleep_minutes(v)} end)
|> Enum.max_by(fn {_, v} -> v end)
{top_minute, _} = Map.to_list(data[topid]) |> Enum.max_by(fn {_k, v} -> v end)
topid * top_minute
end
def sort_chronological(records) do
records |> Enum.sort(&compare/2)
end
def solve2(records) do
data = records |> sort_chronological |> calculate_sleep_time({}, Map.new())
r =
data
|> Map.to_list()
|> Enum.map(fn {id, map} ->
{id,
Map.to_list(map)
|> Enum.max_by(fn {_, count} ->
count
end)}
end)
|> Enum.max_by(fn {_, {_, count}} -> count end)
{id, {minute, _}} = r
id * minute
end
end
|
lib/day04.ex
| 0.623148
| 0.474266
|
day04.ex
|
starcoder
|
defmodule HTMLParser.HTMLNodeTree do
@moduledoc """
Represents a tree of HTML nodes
"""
alias HTMLParser.HTMLTextNode
@enforce_keys [:tag]
defstruct [:tag, :next, children: [], attrs: %{}, empty: false]
@type t :: %__MODULE__{}
@type tag :: atom()
@doc """
Builds a new `HTMLNodeTree`
"""
@spec new(tag) :: t()
def new(tag) do
%__MODULE__{tag: tag}
end
@doc """
Copies attrs map into node tree
"""
@spec put_attrs(t(), map()) :: t()
def put_attrs(%__MODULE__{} = html_node_tree, attrs) do
%__MODULE__{html_node_tree | attrs: attrs}
end
@spec put_next(t(), (() -> any())) :: t()
def put_next(%__MODULE__{} = html_node_tree, next) do
    %__MODULE__{html_node_tree | next: next}
end
@spec next(t() | HTMLTextNode.t()) :: any()
def next(%__MODULE__{next: nil} = html_node_tree) do
html_node_tree
|> put_next(fn -> traverse_lazy(html_node_tree) end)
|> next
end
def next(%__MODULE__{next: next_fun}) when is_function(next_fun, 0) do
do_next(next_fun)
end
def next(%HTMLTextNode{next: next_fun}) when is_function(next_fun, 0) do
do_next(next_fun)
end
defp do_next(next_fun) do
case next_fun.() do
{%__MODULE__{} = next_node, next_fun} ->
put_next(next_node, next_fun)
{%HTMLTextNode{} = next_node, next_fun} ->
HTMLTextNode.put_next(next_node, next_fun)
:done ->
:done
end
end
@doc """
Adds another node tree to child list
"""
@spec add_child(t(), t() | HTMLTextNode.t()) :: t()
def add_child(%__MODULE__{children: children} = html_node_tree, child) do
%__MODULE__{html_node_tree | children: [child | children]}
end
@spec add_children(t(), [t() | HTMLTextNode.t()]) :: t()
def add_children(%__MODULE__{} = html_node_tree, children) do
%__MODULE__{html_node_tree | children: html_node_tree.children ++ children}
end
@spec put_empty(t()) :: t()
def put_empty(%__MODULE__{} = html_node_tree) do
%__MODULE__{html_node_tree | empty: true}
end
@doc """
Recursively traverses across a node tree and invokes a callback on each node
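A small usage sketch (tree built with `new/1` and `add_child/2`):

    tree =
      HTMLParser.HTMLNodeTree.new(:div)
      |> HTMLParser.HTMLNodeTree.add_child(HTMLParser.HTMLNodeTree.new(:span))

    HTMLParser.HTMLNodeTree.traverse(tree, &IO.inspect/1)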
"""
@spec traverse(t() | HTMLTextNode.t(), (t() -> any())) :: :ok
def traverse(%HTMLTextNode{} = text_node, callback) do
callback.(text_node)
end
def traverse(%__MODULE__{children: children} = html_node_tree, callback) do
callback.(html_node_tree)
Enum.each(children, &traverse(&1, callback))
end
@doc """
Lazily traverses one node at a time
"""
@spec traverse_lazy(t()) :: :done | {t() | HTMLTextNode.t(), (() -> any())}
def traverse_lazy(%__MODULE__{} = html_node_tree) do
do_traverse_lazy([html_node_tree], fn -> :done end)
end
defp do_traverse_lazy([], next), do: next.()
defp do_traverse_lazy(
[%__MODULE__{children: children} = html_node_tree | remaining_nodes],
next
) do
remaining_nodes = children ++ remaining_nodes
{html_node_tree, fn -> do_traverse_lazy(remaining_nodes, next) end}
end
defp do_traverse_lazy([%HTMLTextNode{} = text_node | remaining_nodes], next) do
{text_node, fn -> do_traverse_lazy(remaining_nodes, next) end}
end
defimpl Enumerable, for: __MODULE__ do
alias HTMLParser.HTMLNodeTree
def count(_html_node_tree), do: {:error, __MODULE__}
def member?(_html_node_tree, _value), do: {:error, __MODULE__}
def slice(_html_node_tree), do: {:error, __MODULE__}
def reduce(_html_node_tree, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(%HTMLNodeTree{} = html_node, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(html_node, &1, fun)}
end
def reduce(%HTMLTextNode{} = text_node, {:suspend, acc}, fun) do
{:suspended, acc, &reduce(text_node, &1, fun)}
end
def reduce(%HTMLNodeTree{} = html_node, {:cont, acc}, fun) do
do_reduce(html_node, fun, acc)
end
def reduce(%HTMLTextNode{} = text_node, {:cont, acc}, fun) do
do_reduce(text_node, fun, acc)
end
defp do_reduce(node, fun, acc) do
case HTMLNodeTree.next(node) do
:done ->
{:done, acc}
next_node ->
reduce(next_node, fun.(next_node, acc), fun)
end
end
end
end
|
lib/html_parser/html_node_tree.ex
| 0.843831
| 0.421284
|
html_node_tree.ex
|
starcoder
|
defmodule UniLoggerBackend do
@moduledoc """
A logger backend that forwards log messages to a process.
## Usage
First add the logger to the backends:
```
# config/config.exs
config :logger, :backends, [{UniLoggerBackend, :console}]
config :logger, level: :info
```
Then configure the `pid` of the process that should receive the log messages
by configuring the backend at runtime. This can be done for example from a
`GenServer` that should receive the log messages:
```
Logger.configure_backend({UniLoggerBackend, :console}, pid: self())
receive do
{level, msg, timestamp, meta} -> IO.puts "Received log"
:flush -> IO.puts "Received flush"
end
```
The registered process will then receive messages when the logger is invoked.
Therefore the registered process should implement `handle_info/2` for tuples
like `{level, msg, timestamp, meta}` and for `:flush`. `:flush` is received
when the logger is flushed by calling `Logger.flush/0`.
"""
alias UniLoggerBackend.Config
@behaviour :gen_event
@typedoc "Type for timestamps"
@type timestamp :: Logger.Formatter.time()
@typedoc "Type for metadata"
@type metadata :: Logger.metadata()
@typedoc "Type for messages"
@type msg :: any
@typedoc "Type for log levels"
@type level :: Logger.level()
@typedoc """
Type for targets.
A target can either be a `pid`, a registered process name or a function with
arity 4. The function receives the log level, the message, a timestamp and the
metadata as arguments. Processes need to implement `handle_info/2` and will
receive the same info as a tuple. Processes are also expected to implement
`handle_info/2` for `:flush` messages. These messages are intended to flush
all pending messages.
"""
@type target :: GenServer.name() | (level, msg, timestamp, metadata -> any)
@typedoc "Options to configure the backend"
@type opt ::
{:level, level}
| {:target, target}
| {:meta, metadata}
| {:formatter, formatter}
@typedoc "Collection type for `opt`"
@type opts :: [opt]
@typedoc """
A formatter to format the log msg before sending. It can be either a
function or a tuple with a module and a function name.
The function receives the log msg, a timestamp as an Erlang time tuple and
the metadata as arguments, and should return the formatted log msg.
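For example, a minimal formatter that tags the message with its level (a
sketch; `to_string/1` assumes the msg is chardata):

    fn level, msg, _timestamp, _meta ->
      "[" <> Atom.to_string(level) <> "] " <> to_string(msg)
    end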
"""
@type formatter :: {module, atom} | (level, msg, timestamp, metadata -> any)
@typedoc """
Serves as internal state of the `UniLoggerBackend` and as config.
* `level` - Specifies the log level.
* `target` - Specifies the target for the log messages.
* `meta` - Additional metadata that will be added to the metadata before
formatting.
* `name` - The name of the logger. This cannot be overridden.
* `formatter` - An optional function that is used to format the log messages
before sending. See `formatter()`.
"""
@type state :: %Config{
level: level,
target: target,
metadata: metadata,
name: atom,
formatter: nil | formatter
}
@spec init({module, atom}) :: {:ok, state}
def init({__MODULE__, name}) do
{:ok, configure(name, [])}
end
def handle_call({:configure, opts}, %{name: name}) do
{:ok, :ok, configure(name, opts)}
end
@spec configure(atom, opts) :: state
defp configure(name, opts) do
applied_opts =
:logger
|> Application.get_env(name, [])
|> Keyword.merge(opts)
|> Keyword.put(:name, name)
Application.put_env(:logger, name, applied_opts)
struct!(Config, applied_opts)
end
# Don't flush if the target is a function or a {module, function} tuple
def handle_event(:flush, %{target: target} = state)
when is_function(target)
when is_tuple(target) do
{:ok, state}
end
def handle_event(:flush, state) do
if process_alive?(state.target) do
send(state.target, :flush)
end
{:ok, state}
end
def handle_event({_level, group_leader, _info}, state)
when node(group_leader) != node() do
{:ok, state}
end
def handle_event(_, %{target: nil} = state) do
{:ok, state}
end
def handle_event(
{level, _, {Logger, msg, timestamp, meta}},
%{target: target} = state
) do
with true <- should_log?(state, level),
meta <- Keyword.merge(meta, state.metadata),
{:ok, msg} <- format(state.formatter, [level, msg, timestamp, meta]) do
send_to_target(target, level, msg, timestamp, meta)
end
{:ok, state}
end
@spec should_log?(state, level) :: boolean
defp should_log?(%{level: right}, left),
do: :lt != Logger.compare_levels(left, right)
defp format(nil, [_, msg, _, _]), do: {:ok, msg}
defp format({mod, fun}, args), do: do_format(mod, fun, args)
defp format(fun, args), do: do_format(fun, args)
@spec do_format(function, list) :: {:ok, any} | :error
defp do_format(fun, args) do
{:ok, apply(fun, args)}
rescue
_ -> :error
end
@spec do_format(module, atom, list) :: {:ok, any} | :error
defp do_format(mod, fun, args) do
{:ok, apply(mod, fun, args)}
rescue
_ -> :error
end
@spec send_to_target(target, level, msg, timestamp, metadata) :: any
defp send_to_target(target, level, msg, timestamp, meta)
when is_function(target) do
apply(target, [level, msg, timestamp, meta])
rescue
_ -> nil
end
defp send_to_target({module, fun_name}, level, msg, timestamp, meta) do
apply(module, fun_name, [level, msg, timestamp, meta])
rescue
_ -> nil
end
defp send_to_target(target, level, msg, timestamp, meta) do
if process_alive?(target),
do: send(target, {level, msg, timestamp, meta})
end
@spec process_alive?(GenServer.name()) :: boolean
defp process_alive?(pid) when is_pid(pid), do: Process.alive?(pid)
defp process_alive?(name) when is_atom(name), do: Process.whereis(name) != nil
end
|
lib/uni_logger_backend.ex
| 0.861465
| 0.785473
|
uni_logger_backend.ex
|
starcoder
|
defmodule Pinterex.Api.Base do
@moduledoc """
The module contains all the logic that does the actual calls to
the Pinterest API
"""
use Tesla
plug Tesla.Middleware.BaseUrl, "https://api.pinterest.com/v1/"
plug Tesla.Middleware.Query, [access_token: key]
plug Tesla.Middleware.JSON
#plug Tesla.Middleware.DebugLogger
defp key do
Application.get_env(:pinterest, :key) ||
System.get_env("PINTEREST_KEY")
end
@doc """
This is the main function that does get requests.
## Parameters
- :get: it only matches get requests
- create_struct: the function used to build structs from the response data
- path: the path of the resource, this path should already contain
all the query fields and everything
## Returns
`{:ok, data}` on success, where `data` is the struct built from the response
body, or `{:error, message}` if the request failed.
"""
def execute_request(:get, create_struct, path) do
get(path)
|> handle_response(create_struct)
end
@doc """
This is the main function that does post requests.
## Parameters
- :post: it only matches post requests
- path: the path of the resource, this path should already contain
all the query fields and everything
- data: the data to post. Example: %{board: "username/board_name"}
"""
def execute_request(:post, path, data) do
post(path, data)
|> handle_response
end
@doc """
This is the main function that does delete requests.
## Parameters
- :delete: it only matches delete requests
- path: the path of the resource, this path should already contain
all the query fields and everything
"""
def execute_request(:delete, path) do
delete(path)
|> handle_response
end
def execute_request(:get, create_struct, path, []) do
  execute_request(:get, create_struct, path)
end
@doc """
The function is used for doing get requests that have additional parameters.
## Parameters
- :get: it only matches get requests
- create_struct: if the response succeeds, this is the function that will be used to create the structs from the data that was received
- path: the path of the resource, this path should already contain
all the query fields and everything
- options: a keyword list of options that should be added to the request. For
example, we might wish to get bio and counts for a certain user. In this case
we would pass in `fields: ["bio", "counts"]`.
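A hypothetical call (`&User.new/1` stands in for whatever struct-building
function the caller provides):

    execute_request(:get, &User.new/1, "me/", fields: ["bio", "counts"])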
## Returns
`{:ok, data}` on success, where `data` contains the structs built from the
response, or `{:error, message}` if the request failed.
"""
def execute_request(:get, create_struct, path, options) do
  case List.keyfind(options, :next, 0) do
    {:next, url} -> execute_request(:get, create_struct, url)
    _ -> execute_request(:get, create_struct, path <> get_fields(path, options))
  end
end
@doc """
This is the main function that does patch requests.
## Parameters
- :patch: it only matches patch requests
- path: the path of the resource, this path should already contain
all the query field and everything
- data: the data to patch. Example: %{name: "<NAME>"}
"""
def execute_request(:patch, create_struct, path, data) do
patch(path, data)
|> handle_response(create_struct)
end
def get_fields(path, options) do
start =
if(String.contains? path, "?") do
"&"
else
"?"
end
start <> Enum.join(Enum.map(options, &concat_items/1), "&")
end
defp concat_items({k, v}) when is_list(v) do
"#{k}=" <> Enum.join(v, ",")
end
defp concat_items({k, v}) do
"#{k}=#{v}"
end
defp handle_response(response, create_struct) do
  case response.status do
    200 -> {:ok, create_struct.(response.body)}
    _ -> {:error, response.body["message"]}
  end
end
defp handle_response(response) do
case response.status do
200 -> {:ok, response.body["data"]}
_ -> {:error, response.body["message"]}
end
end
end
|
lib/api/base.ex
| 0.682362
| 0.400163
|
base.ex
|
starcoder
|
defmodule Kino.Utils.Table do
@moduledoc false
# Common functions for handling various Elixir terms
# as table records.
@type record :: map() | list({term(), term()}) | tuple() | term()
@doc """
Computes table column specifications that accommodate
the given records.
Note that the columns are computed based on the first
record, if present.
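For example, with plain maps (not Ecto schemas):

    iex> Kino.Utils.Table.columns_for_records([%{id: 1, name: "a"}])
    [%{key: :id, label: ":id"}, %{key: :name, label: ":name"}]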
"""
@spec columns_for_records(list(record())) :: list(Kino.Table.column())
def columns_for_records(records) do
case Enum.at(records, 0) do
nil -> []
first_record -> columns_for_record(first_record)
end
end
defp columns_for_record(record) when is_tuple(record) do
record
|> Tuple.to_list()
|> Enum.with_index()
|> Enum.map(&elem(&1, 1))
|> keys_to_columns()
end
defp columns_for_record(record) when is_map(record) do
if schema = ecto_schema(record) do
columns_for_schema(schema)
else
record |> Map.keys() |> Enum.sort() |> keys_to_columns()
end
end
defp columns_for_record(record) when is_list(record) do
record |> Keyword.keys() |> keys_to_columns()
end
defp columns_for_record(_record) do
# If the record is neither of the expected enumerables,
# we treat it as a single column value
keys_to_columns([:item])
end
@doc """
Converts keys to column specifications.
"""
@spec keys_to_columns(list(term())) :: list(Kino.Table.column())
def keys_to_columns(keys) do
Enum.map(keys, fn key -> %{key: key, label: inspect(key)} end)
end
@doc """
Computes table column specifications for the given Ecto schema.
"""
@spec columns_for_schema(module()) :: list(Kino.Table.column())
def columns_for_schema(schema) do
for field <- schema.__schema__(:fields) do
type = schema.__schema__(:type, field)
%{key: field, label: inspect(field), type: ecto_type_to_string(type)}
end
end
defp ecto_type_to_string({:parameterized, module, _info}), do: inspect(module)
defp ecto_type_to_string(type), do: inspect(type)
@doc """
Looks up record field value by key.
"""
@spec get_field(record(), key :: term()) :: value :: term()
def get_field(record, key)
def get_field(record, key) when is_tuple(record) do
if key < tuple_size(record) do
elem(record, key)
else
nil
end
end
def get_field(record, key) when is_list(record) do
record[key]
end
def get_field(record, key) when is_map(record) do
Map.get(record, key)
end
def get_field(record, :item) do
record
end
@doc """
Converts records to row specifications respecting the
given columns.
"""
@spec records_to_rows(list(record()), list(Kino.Table.column())) :: list(Kino.Table.row())
def records_to_rows(records, columns) do
for record <- records do
fields =
Map.new(columns, fn column ->
value = get_field(record, column.key)
{column.key, inspect(value)}
end)
%{fields: fields}
end
end
@doc """
Extracts schema module from the given struct or queryable.
If no schema found, `nil` is returned.
"""
@spec ecto_schema(queryable :: term()) :: module() | nil
def ecto_schema(queryable)
def ecto_schema(%{from: %{source: {_source, schema}}}) do
schema
end
def ecto_schema(queryable) when is_atom(queryable) do
if Code.ensure_loaded?(queryable) and function_exported?(queryable, :__schema__, 1) do
queryable
else
nil
end
end
def ecto_schema(struct) when is_struct(struct) do
ecto_schema(struct.__struct__)
end
def ecto_schema(_queryable), do: nil
end
|
lib/kino/utils/table.ex
| 0.714528
| 0.607489
|
table.ex
|
starcoder
|
defmodule Kiq do
@moduledoc ~S"""
Kiq is a robust and extensible job processing queue that aims for
compatibility with Sidekiq Enterprise.
Job queuing, processing and reporting are all built on GenStage. That means
maximum parallelism with the safety of backpressure as jobs are processed.
## Usage
Kiq isn't an application that must be started. Similarly to Ecto, you define
one or more Kiq modules within your application. This allows multiple
supervision trees with entirely different configurations.
Run the generator to create a new `Kiq` supervisor for your application:
mix kiq.gen.supervisor -m MyApp.Kiq
Alternatively, manually define a `Kiq` module for your application:
defmodule MyApp.Kiq do
use Kiq, queues: [default: 25, events: 50]
end
Include the module in your application's supervision tree:
defmodule MyApp.Application do
@moduledoc false
use Application
alias MyApp.{Endpoint, Kiq, Repo}
def start(_type, _args) do
children = [
{Repo, []},
{Endpoint, []},
{Kiq, []}
]
Supervisor.start_link(children, strategy: :one_for_one, name: MyApp.Supervisor)
end
end
## Configuration
Kiq is used to start one or more supervision trees in your application. That
means there isn't a central Kiq "app" to configure, instead each supervision
tree may be configured independently.
Configuration options pass through a couple of locations, accumulating
overrides until finally passing through the `init/2` callback. Options are
accumulated in this order:
1. Options passed into via the `use` macro. These should be constant compile
time options, i.e. `extra_reporters`.
2. Options passed to `start_link/1` by the application's supervision tree.
These should also be values suitable for compile time.
3. Injected by the `init/2` callback inside your application's Kiq instance.
This is where runtime configuration such as the Redis URL or environment
specific options should be passed. The options can come from any dynamic
source such as Mix Config, Vault, Etcd, environment variables, etc.
The default `init/2` implementation uses `System.get_env/1` to read the
`REDIS_URL` on boot. The default callback can be overridden to pull in
additional configuration. For example, to grab values from
`Application.get_env/2`:
def init(_reason, opts) do
for_env = Application.get_env(:my_app, :kiq, [])
opts =
opts
|> Keyword.merge(for_env)
|> Keyword.put(:client_opts, [redis_url: System.get_env("REDIS_URL")])
{:ok, opts}
end
The `opts` argument contains all configuration from stages 1 and 2 (the `use`
macro and the call to `start_link/1`).
### Supervisor Configuration
These configuration options must be provided to the supervision tree on startup:
* `:client_opts` — A keyword list of options passed to each `Redix`
connection. This *must* contain the key `:redis_url`, which is passed to
`Redix.start_link/1` as the first argument.
* `:dead_limit` — The maximum number of jobs that will be retained in the dead
set. Jobs beyond the limit are pruned any time a new job is moved to the dead
set. The default is `10_000`.
* `:dead_timeout` — The maximum amount of time that a job will remain in the
dead set before being purged, specified in seconds. The default is 6 months.
* `:extra_reporters` — Additional reporters that your application will use to
report errors, track external stats, etc.
* `:fetch_interval` — How frequently to poll for new jobs. Polling only
happens when consumers aren't actively requesting new jobs.
* `:flush_interval` - How frequently locally enqueued jobs will be pushed to
Redis. This defaults to `10ms`, though it will back-off by a factor of `1.5`
if there are any connection errors.
* `:periodics` — A list of job scheduling tuples in the form `{schedule,
worker}` or `{schedule, worker, options}`. See
[Periodic Jobs](#module-periodic-jobs) for details.
* `:pool_size` — Controls the number of Redis connections available to Kiq,
defaults to 5.
* `:queues` — A keyword list of queues where each entry is the name of the
queue and the concurrency setting. For example, setting `[default: 10,
exports: 5, media: 5]` would start the queues `default`, `exports` and
`media` with a combined concurrency of 20. The concurrency setting
specifies how many jobs _each queue_ will run concurrently.
* `:schedulers` — A list of schedulers to run. The default schedulers are
"retry" which is used to retry failed jobs with a backoff and "schedule",
used to enqueue jobs at a specific time in the future. Set this to an empty
list to disable all schedulers and allow Sidekiq to handle enqueuing
retries and scheduled jobs.
* `:server?` — Whether to start the queue supervisors and start processing
jobs or only start the client. This setting is useful for testing or
deploying your application's web and workers separately.
* `:test_mode` — Either `:disabled` or `:sandbox`. See
[Testing](#module-testing) for details.
### Runtime Configuration
These configuration options may be provided when starting a Kiq supervisor,
but may also be set dynamically at runtime:
* `:quiet` — Instruct each queue to stop processing new jobs. Any in-progress
jobs will keep running uninterrupted.
For example, to quiet all queue producers:
MyKiq.configure(quiet: true)
## Testing
Kiq has special considerations to facilitate isolated and asynchronous testing.
For testing Kiq should be configured in `:sandbox` mode and have the server
disabled. This can be done specifically for the test environment by adding
config to `config/test.exs`:
config :my_app, :kiq, server?: false, test_mode: :sandbox
Running in `:sandbox` mode ensures that enqueued jobs stay in memory and are
never flushed to Redis. This allows your tests to use `Kiq.Testing` to make
quick assertions about which jobs have or haven't been enqueued. See the docs
for `Kiq.Testing` for more details and usage.
## Reliable Push
Reliable push replicates the safety aspects of Sidekiq Pro's [reliability
client][rely]. To guard against network errors or other Redis issues the
client buffers all jobs locally. At a frequent interval the jobs are flushed
to Redis. If there are any errors while flushing, the jobs will remain in
memory until flushing can be retried later.
### Caveats
* The local job buffer is stored in memory; if the server is restarted
suddenly, some jobs may be lost.
* There isn't any limit on the number of jobs that can be buffered.
However, to conserve space jobs are stored compressed.
[rely]: https://github.com/mperham/sidekiq/wiki/Pro-Reliability-Client
## Private Queues
Kiq tries to prevent all job loss through private queues, a variant of the
[Super Fetch][super] mechanism available in Sidekiq Pro. When jobs are
executed they are backed up to a private queue specific to the server
processing the job. If the processor crashes or the application is terminated
before the job is finished the jobs remain backed up. On startup, and
periodically afterward, jobs in any dead private queues are pushed back to
the public queue for re-execution.
This solution is well suited to containerized environments and autoscaling.
[super]: https://github.com/mperham/sidekiq/wiki/Pro-Reliability-Server#super_fetch
## Unique Jobs
Kiq supports Sidekiq Enterprise's [unique jobs][uniq]. This feature prevents
enqueueing duplicate jobs while an original job is still pending. The
operations that attempt to enforce uniqueness are _not_ atomic, so uniqueness
is not guaranteed and should be considered best effort.
Enable unique jobs for a worker by setting a unique time period:
use Kiq.Worker, unique_for: :timer.minutes(5)
### Unlock Policy
By default unique jobs will hold the unique lock until the job has run
successfully. This policy ensures that jobs will remain locked if there are
any errors, but it is prone to race conditions in certain situations for
longer running jobs.
Generally it is best to stick with the default `:success` policy. However, if
your job is effected by the race condition you can change your worker's
policy:
use Kiq.Worker, unique_for: :timer.minutes(60), unique_until: :start
### Caveats
* Note that job uniqueness is calculated from the `class`, `args`, and
`queue`. This means that jobs with identical args may be added to different
queues.
* Unique jobs enqueued by Sidekiq will be unlocked by Kiq, but they may not
use the same lock value. This is due to differences between hashing Erlang
terms and Ruby objects. To help ensure uniqueness always enqueue unique jobs
from either Sidekiq or Kiq.
[uniq]: https://github.com/mperham/sidekiq/wiki/Ent-Unique-Jobs
## Expiring Jobs
Kiq supports Sidekiq Pro's [expiring jobs][expi]. Expiring jobs won't be run
after a configurable amount of time. The expiration period is set with the
`expires_in` option, which accepts millisecond values identically to
`unique_for`.
Set the expiration for a worker using a relative time:
use Kiq.Worker, expires_in: :timer.hours(1)
Expiration time applies after the scheduled time. That means scheduling a job
to run in an hour, with an expiration of 30 minutes, will expire in one hour
and 30 minutes.
[expi]: https://github.com/mperham/sidekiq/wiki/Pro-Expiring-Jobs
## Periodic Jobs
Kiq supports Sidekiq Enterprise's [Periodic Jobs][peri]. This allows jobs to
be registered with a schedule and enqueued automatically. Jobs are registered
as `{crontab, worker}` or `{crontab, worker, options}` using the `:periodics`
attribute:
use Kiq, periodics: [
{"* * * * *", MyApp.MinuteWorker},
{"0 * * * *", MyApp.HourlyWorker},
{"0 0 * * *", MyApp.DailyWorker, retry: 1},
]
These jobs would be executed as follows:
* `MyApp.MinuteWorker` - Executed once every minute
* `MyApp.HourlyWorker` - Executed at the first minute of every hour
* `MyApp.DailyWorker` - Executed at midnight every day
The crontab format, as parsed by `Kiq.Parser.Crontab`, respects all [standard
rules][cron] and has one minute resolution. That means it isn't possible to
enqueue a job every N seconds.
### Caveats
* All schedules are evaluated as UTC, the local timezone is never taken
into account.
* Periodic jobs registered in Kiq _aren't_ visible in the Loop panel of the
Sidekiq Dashboard. This is due to the way loop data is stored by Sidekiq
and can't be worked around.
* This is an alternative to using a separate scheduler such as
[Quantum][quan]. However, unlike Quantum, Kiq doesn't support node based
clustering; instead it uses Redis to coordinate and distribute work. This
means workers can scale horizontally even in a restricted environment like
Heroku.
[peri]: https://github.com/mperham/sidekiq/wiki/Ent-Periodic-Jobs
[cron]: https://en.wikipedia.org/wiki/Cron#Overview
[quan]: https://github.com/quantum-elixir/quantum-core
## Instrumentation & Metrics
The instrumentation reporter provides integration with [Telemetry][tele], a
dispatching library for metrics. It is easy to report Kiq metrics to any
backend by attaching to `:kiq` events.
For example, to log out the timing for all successful jobs:
defmodule KiqJobLogger do
require Logger
def handle_event([:kiq, :job, :success], timing, metadata, _config) do
Logger.info("[#{metadata.queue}] #{metadata.class} finished in #{timing}")
end
end
:telemetry.attach("kiq-job-logger", [:kiq, :job, :success], &KiqJobLogger.handle_event/4, nil)
Here is a reference for the available metrics:
| event | name | value | metadata |
| --------- | ------------------------ | ------ | ------------------------- |
| `started` | `[:kiq, :job, :started]` | 1 | `:class, :queue` |
| `success` | `[:kiq, :job, :success]` | timing | `:class, :queue` |
| `aborted` | `[:kiq, :job, :aborted]` | 1 | `:class, :queue, :reason` |
| `failure` | `[:kiq, :job, :failure]` | 1 | `:class, :queue, :error` |
[tele]: https://hexdocs.pm/telemetry
"""
alias Kiq.{Client, Job, Timestamp}
@type job_args :: map() | Keyword.t() | Job.t()
@type job_opts :: [in: pos_integer(), at: DateTime.t()]
@doc """
Starts the client and possibly the supervision tree, returning `{:ok, pid}` when startup is
successful.
Returns `{:error, {:already_started, pid}}` if the tree is already started or `{:error, term}`
in case anything else goes wrong.
## Options
Any options passed to `start_link` will be merged with those provided in the `use` block.
"""
@callback start_link(opts :: Keyword.t()) :: Supervisor.on_start()
@doc """
A callback executed when the supervision tree is started and possibly when configuration is
read.
The first argument is the context of the callback being invoked. In most circumstances this
will be `:supervisor`. The second argument is a keyword list of the combined options passed
to `use/1` and `start_link/1`.
Application configuration is _not_ passed into the `init/2` callback. To use application
config the callback must be overridden and merged manually.
"""
@callback init(reason :: :supervisor, opts :: Keyword.t()) :: {:ok, Keyword.t()} | :ignore
@doc """
Clear all enqueued, scheduled and backup jobs.
All known queues are cleared, even if they aren't listed in the current configuration.
"""
@callback clear() :: :ok
@doc """
Set runtime configuration values.
See the "Runtime Configuration" section in the `Kiq` module documentation.
"""
@callback configure(Keyword.t()) :: :ok
@doc """
Enqueue a job to be processed asynchronously.
Jobs can be enqueued from `Job` structs, maps or keyword lists.
## Options
* `in` - The amount of time in seconds to wait before processing the job. This must be a
positive integer.
* `at` - A specific `DateTime` in the future when the job should be processed.
## Examples
# Enqueue a job to be processed immediately
MyJob.new([1, 2]) |> MyKiq.enqueue()
# Enqueue a job in one minute
MyJob.new([1, 2]) |> MyKiq.enqueue(in: 60)
# Enqueue a job some time in the future
MyJob.new([1, 2]) |> MyKiq.enqueue(at: ~U[2020-09-20 12:00:00Z])
# Enqueue a job from scratch, without using a worker module
MyKiq.enqueue(class: "ExternalWorker", args: [1])
"""
@callback enqueue(job_args(), job_opts()) :: {:ok, Job.t()} | {:error, Exception.t()}
@doc false
defmacro __using__(opts) do
quote do
@behaviour Kiq
@client_name Module.concat(__MODULE__, "Client")
@pool_name Module.concat(__MODULE__, "Pool")
@registry_name Module.concat(__MODULE__, "Registry")
@reporter_name Module.concat(__MODULE__, "Reporter")
@senator_name Module.concat(__MODULE__, "Senator")
@supervisor_name Module.concat(__MODULE__, "Supervisor")
@opts unquote(opts)
|> Keyword.put(:main, __MODULE__)
|> Keyword.put(:name, @supervisor_name)
|> Keyword.put(:client_name, @client_name)
|> Keyword.put(:pool_name, @pool_name)
|> Keyword.put(:registry_name, @registry_name)
|> Keyword.put(:reporter_name, @reporter_name)
|> Keyword.put(:senator_name, @senator_name)
@doc false
def child_spec(opts) do
%{id: __MODULE__, start: {__MODULE__, :start_link, [opts]}, type: :supervisor}
end
@impl Kiq
def start_link(opts \\ []) do
@opts
|> Keyword.merge(opts)
|> Kiq.Supervisor.start_link()
end
@impl Kiq
def init(reason, opts) when is_atom(reason) and is_list(opts) do
client_opts = [redis_url: System.get_env("REDIS_URL")]
{:ok, Keyword.put(opts, :client_opts, client_opts)}
end
@impl Kiq
def clear do
Kiq.Client.clear(@client_name)
end
@impl Kiq
def configure(opts) when is_list(opts) do
Kiq.configure(@registry_name, Keyword.take(opts, [:quiet]))
end
@impl Kiq
def enqueue(job_args, job_opts \\ []) when is_map(job_args) or is_list(job_args) do
Kiq.enqueue(@client_name, job_args, job_opts)
end
defoverridable Kiq
end
end
@doc false
def enqueue(client, job_args, job_opts) do
job =
job_args
|> to_job()
|> with_opts(job_opts)
Client.store(client, job)
end
defp to_job(%Job{} = job), do: job
defp to_job(args), do: Job.new(args)
defp with_opts(job, []), do: job
defp with_opts(job, at: timestamp), do: %Job{job | at: timestamp}
defp with_opts(job, in: seconds), do: %Job{job | at: Timestamp.unix_in(seconds)}
@doc false
def configure(registry, opts) do
Registry.dispatch(registry, :config, fn entries ->
for {pid, _} <- entries, do: send(pid, {:configure, opts})
end)
end
end
|
lib/kiq.ex
| 0.912463
| 0.567098
|
kiq.ex
|
starcoder
|
defmodule Bitcoinex.Utils do
@moduledoc """
Contains useful utility functions used in Bitcoinex.
"""
@spec sha256(iodata()) :: binary
def sha256(str) do
:crypto.hash(:sha256, str)
end
@spec replicate(term(), integer()) :: list(term())
def replicate(_x, 0) do
  []
end
def replicate(x, num) when num > 0 do
for _ <- 1..num, do: x
end
@spec double_sha256(iodata()) :: binary
def double_sha256(preimage) do
:crypto.hash(
:sha256,
:crypto.hash(:sha256, preimage)
)
end
@spec hash160(iodata()) :: binary
def hash160(preimage) do
:crypto.hash(
:ripemd160,
:crypto.hash(:sha256, preimage)
)
end
@typedoc """
The pad_type describes the padding to use.
"""
@type pad_type :: :leading | :trailing
@doc """
pads binary according to the byte length and the padding type. A binary can be padded with leading or trailing zeros.
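For example (outputs follow directly from the padding arithmetic):

    iex> Bitcoinex.Utils.pad(<<1>>, 2, :leading)
    <<0, 1>>
    iex> Bitcoinex.Utils.pad(<<1>>, 2, :trailing)
    <<1, 0>>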
"""
@spec pad(bin :: binary, byte_len :: integer, pad_type :: pad_type) :: binary
def pad(bin, byte_len, _pad_type) when is_binary(bin) and byte_size(bin) == byte_len do
bin
end
def pad(bin, byte_len, pad_type) when is_binary(bin) and pad_type == :leading do
pad_len = 8 * byte_len - byte_size(bin) * 8
<<0::size(pad_len)>> <> bin
end
def pad(bin, byte_len, pad_type) when is_binary(bin) and pad_type == :trailing do
pad_len = 8 * byte_len - byte_size(bin) * 8
bin <> <<0::size(pad_len)>>
end
def int_to_little(i, p) do
i
|> :binary.encode_unsigned(:little)
|> pad(p, :trailing)
end
def little_to_int(i), do: :binary.decode_unsigned(i, :little)
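# Encodes an integer as a Bitcoin-style variable-length integer (varint):
# values below 0xFD fit in one byte; larger values get a one-byte marker
# (0xFD/0xFE/0xFF) followed by 2/4/8 little-endian bytes.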
def encode_int(i) when i > 0 do
cond do
i < 0xFD -> :binary.encode_unsigned(i)
i <= 0xFFFF -> <<0xFD>> <> int_to_little(i, 2)
i <= 0xFFFFFFFF -> <<0xFE>> <> int_to_little(i, 4)
i <= 0xFFFFFFFFFFFFFFFF -> <<0xFF>> <> int_to_little(i, 8)
true -> {:error, "invalid integer size"}
end
end
def hex_to_bin(str) do
str
|> String.downcase()
|> Base.decode16(case: :lower)
|> case do
# In case of error, it's already binary or it's invalid
:error -> {:error, "invalid string"}
# valid binary
{:ok, bin} -> bin
end
end
end
|
lib/utils.ex
| 0.860266
| 0.506347
|
utils.ex
|
starcoder
|
defmodule Apq.DocumentProvider do
@moduledoc """
Apq document provider or Absinthe plug.
### Example
Define a new module and `use Apq.DocumentProvider`:
```elixir
defmodule ApqExample.Apq do
use Apq.DocumentProvider,
cache_provider: ApqExample.Cache,
max_query_size: 16384 # default
end
```
#### Options
- `:cache_provider` -- Module responsible for cache retrieval and placement. The cache provider needs to follow the `Apq.CacheProvider` behaviour.
- `:max_query_size` -- (Optional) Maximum number of bytes of the graphql query document. Defaults to 16384 bytes (16kb).
- `:json_codec` -- (Optional) Only required if using GET for APQ's hashed queries. Must respond to `decode!/1`.
- `:strategy` -- Strategy whether to cache raw graphql strings, or the parsed/validated blueprint. Defaults to raw.
Example configuration for using Apq in `Absinthe.Plug`. Same goes for configuring
Phoenix.
match("/api",
to: Absinthe.Plug,
init_opts: [
schema: ApqExample.Schema,
json_codec: Jason,
interface: :playground,
document_providers: [ApqExample.Apq, Absinthe.Plug.DocumentProvider.Default]
]
)
When the Apq document provider does not match (i.e. the apq extensions are not set in the request),
the request is passed to the next document provider. This will most likely be
the default provider available (`Absinthe.Plug.DocumentProvider.Default`).
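As a sketch, a request registering a query sends both the query and its hash,
while subsequent requests send only the hash. This is the parameter shape
matched by `process_params/1` below (the query string here is illustrative):

```elixir
%{
  "query" => "{ user { id } }",
  "extensions" => %{
    "persistedQuery" => %{"version" => 1, "sha256Hash" => "<sha256-of-query>"}
  }
}
```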
"""
# Maximum query size
@max_query_size 16_384
require Logger
defmacro __using__(opts) do
cache_provider = Keyword.fetch!(opts, :cache_provider)
max_query_size = Keyword.get(opts, :max_query_size, @max_query_size)
json_codec = Keyword.get(opts, :json_codec)
strategy = Keyword.get(opts, :strategy, Apq.Strategy.RawQuery)
quote do
@behaviour Absinthe.Plug.DocumentProvider
Module.put_attribute(__MODULE__, :max_query_size, unquote(max_query_size))
def pipeline(options) do
Apq.DocumentProvider.pipeline(options,
json_codec: unquote(json_codec),
strategy: unquote(strategy),
cache_provider: unquote(cache_provider)
)
end
@doc """
Handles any requests with the Apq extensions and forwards those without
to the next document provider.
"""
def process(%{params: params} = request, opts) do
opts =
Keyword.merge(
[
json_codec: unquote(json_codec),
max_query_size: unquote(max_query_size),
cache_provider: unquote(cache_provider),
strategy: unquote(strategy)
],
opts
)
Apq.DocumentProvider.process(request, opts)
end
defoverridable pipeline: 1
end
end
@doc """
Handles any requests with the Apq extensions and forwards those without
to the next document provider.
"""
def process(request, opts) do
cache_provider = Keyword.fetch!(opts, :cache_provider)
json_codec = Keyword.get(opts, :json_codec)
max_query_size = Keyword.get(opts, :max_query_size)
processed_params =
request.params
|> format_params(json_codec)
|> process_params()
case processed_params do
{hash, nil} -> get_document(cache_provider, request, hash)
{hash, query} -> store_document(request, hash, query, max_query_size)
_ -> {:cont, request}
end
end
@doc """
Determine the remaining pipeline for a request with an apq document.
"""
def pipeline(%{pipeline: as_configured} = request, opts) do
opts[:strategy].pipeline(as_configured, [digest: request.document.digest] ++ opts)
end
defp store_document(request, _digest, query, max_query_size)
when byte_size(query) > max_query_size do
{:halt, %{request | document: %Apq{error: :apq_query_max_size_error}}}
end
defp store_document(request, digest, query, _max_query_size)
when is_binary(query) and is_binary(digest) do
calculated_digest = Apq.Digest.digest(query)
case calculated_digest == digest do
true ->
{:halt,
%{
request
| document: %Apq{
action: :apq_stored,
document: query,
digest: digest
}
}}
false ->
{:halt, %{request | document: %Apq{error: :apq_hash_match_error, document: query}}}
end
end
defp store_document(request, hash, _, _max_query_size)
when is_binary(hash) do
{:halt, %{request | document: %Apq{error: :apq_query_format_error}}}
end
defp store_document(request, _hash, query, _max_query_size)
when is_binary(query) do
{:halt, %{request | document: %Apq{error: :apq_hash_format_error}}}
end
defp get_document(cache_provider, request, digest) when is_binary(digest) do
case cache_provider.get(digest) do
# Cache miss
{:ok, nil} ->
{:halt, %{request | document: %Apq{error: :apq_not_found_error}}}
# Cache hit
{:ok, document} ->
{:halt,
%{
request
| document: %Apq{
action: :apq_found,
document: document,
digest: digest
}
}}
_error ->
Logger.warn("Error occurred getting cache entry for #{digest}")
{:cont, request}
end
end
defp get_document(_cache_provider, request, _) do
{:halt, %{request | document: %Apq{error: :apq_hash_format_error}}}
end
defp format_params(%{"extensions" => extensions} = params, json_codec)
when is_binary(extensions) do
case Kernel.function_exported?(json_codec, :decode!, 1) do
true ->
Map.put(params, "extensions", json_codec.decode!(extensions))
_ ->
raise RuntimeError, message: "json_codec must be specified and respond to decode!/1"
end
end
defp format_params(params, _json_codec), do: params
defp process_params(%{
"query" => query,
"extensions" => %{"persistedQuery" => %{"version" => 1, "sha256Hash" => hash}}
}) do
{hash, query}
end
defp process_params(%{
"extensions" => %{"persistedQuery" => %{"version" => 1, "sha256Hash" => hash}}
}) do
{hash, nil}
end
defp process_params(params), do: params
end
|
lib/apq/document_provider.ex
| 0.804214
| 0.762601
|
document_provider.ex
|
starcoder
|
defmodule RaceConditionBank do
@doc """
## Example 1 Manual Way:
iex[1]> alias RaceConditionBank, as: RCB
RaceConditionBank
iex[2]> RCB.new_account(1, 100)
{:ok, #PID<0.157.0>}
iex[3]> RCB.credit(1, 50)
:ok
iex[4]> :sys.get_state(1 |> Integer.to_string |> String.to_atom)
150
iex[5]> RCB.debit(1, 50)
:ok
iex[6]> :sys.get_state(1 |> Integer.to_string |> String.to_atom)
100
## Example 2 Async Way:
iex[1]> alias RaceConditionBank, as: RCB
RaceConditionBank
iex[2]> RCB.async_mischief()
299
iex[3]> RCB.async_mischief()
301
iex[4]> RCB.async_mischief()
306
iex[5]> RCB.async_mischief()
302
iex[6]> RCB.async_mischief()
316
## Example 3 Sync Way:
iex[1]> alias RaceConditionBank, as: RCB
RaceConditionBank
iex[2]> RCB.sync_mischief()
300
iex[3]> RCB.sync_mischief()
300
iex[4]> RCB.sync_mischief()
300
iex[5]> RCB.sync_mischief()
300
iex[6]> RCB.sync_mischief()
300
"""
alias RaceConditionBank.Bank
def new_account(account_number, balance) do
Bank.start_link(account_number, balance)
end
def credit(account_number, amount) do
Bank.credit(account_number, amount)
end
def debit(account_number, amount) do
Bank.debit(account_number, amount)
end
def transfer(from_account_number, to_account_number, amount) do
debit(from_account_number, amount)
credit(to_account_number, amount)
end
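# Runs 100 transfers concurrently as Tasks. Each transfer is two separate
# operations (a debit and a credit), so interleaved execution can corrupt
# balances and the final total drifts from the expected 300 (see the
# moduledoc examples above).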
def async_mischief() do
Enum.each((1..3), & new_account(&1, 100))
1..100
|> Enum.map(fn _ ->
(1..3)
|> Enum.shuffle
|> Enum.take(2)
|> async_transfer(:rand.uniform(3))
end)
|> Enum.map(&Task.await/1)
# now get the total
~w[1 2 3]a
|> Enum.map(fn n -> :sys.get_state(n) end)
|> Enum.sum
end
def async_transfer([from_account_number, to_account_number], amount) do
Task.async(fn ->
transfer(from_account_number, to_account_number, amount)
end)
end
def sync_mischief() do
Enum.each((1..3), & new_account(&1, 100))
1..100
|> Enum.map(fn _ ->
(1..3)
|> Enum.shuffle
|> Enum.take(2)
|> sync_transfer(:rand.uniform(3))
end)
# now get the total
~w[1 2 3]a
|> Enum.map(fn n -> :sys.get_state(n) end)
|> Enum.sum
end
def sync_transfer([from_account_number, to_account_number], amount) do
transfer(from_account_number, to_account_number, amount)
end
end
|
2020/otp/race_condition_bank/lib/race_condition_bank.ex
| 0.544196
| 0.407687
|
race_condition_bank.ex
|
starcoder
|
defmodule Iodized.DefinitionJson do
defprotocol Json do
def to_json(definition)
end
def from_json(nil) do
nil
end
def from_json(definition) do
operand = Map.fetch!(definition, :operand)
from_json(operand, definition)
end
defp from_json("any", definition) do
%Iodized.Definition.Any{definitions: Enum.map(Map.fetch!(definition, :definitions), &from_json/1)}
end
defimpl Json, for: Iodized.Definition.Any do
def to_json(any) do
%{operand: "any", definitions: Enum.map(any.definitions || [], &Json.to_json(&1))}
end
end
defp from_json("all", definition) do
%Iodized.Definition.All{definitions: Enum.map(Map.fetch!(definition, :definitions), &from_json/1)}
end
defimpl Json, for: Iodized.Definition.All do
def to_json(all) do
%{operand: "all", definitions: Enum.map(all.definitions || [], &Json.to_json(&1))}
end
end
defp from_json("none", definition) do
%Iodized.Definition.None{definitions: Enum.map(Map.fetch!(definition, :definitions), &from_json/1)}
end
defimpl Json, for: Iodized.Definition.None do
def to_json(none) do
%{operand: "none", definitions: Enum.map(none.definitions || [], &Json.to_json(&1))}
end
end
defp from_json("included_in", definition) do
actual_state_param_name = Map.fetch!(definition, :param_name)
allowed_values = Map.fetch!(definition, :value)
true = is_list(allowed_values) # validate we've got a list
%Iodized.Definition.IncludedIn{actual_state_param_name: actual_state_param_name, allowed_values: allowed_values}
end
defimpl Json, for: Iodized.Definition.IncludedIn do
def to_json(included_in) do
%{operand: "included_in",
param_name: included_in.actual_state_param_name,
value: included_in.allowed_values}
end
end
defp from_json("is", definition) do
actual_state_param_name = Map.fetch!(definition, :param_name)
allowed_value = Map.fetch!(definition, :value)
%Iodized.Definition.Is{actual_state_param_name: actual_state_param_name, allowed_value: allowed_value}
end
defimpl Json, for: Iodized.Definition.Is do
def to_json(is) do
%{operand: "is", param_name: is.actual_state_param_name, value: is.allowed_value}
end
end
defp from_json("percentage", definition) do
actual_state_param_name = Map.fetch!(definition, :param_name)
threshold = Map.fetch!(definition, :value)
%Iodized.Definition.Percentage{
actual_state_param_name: actual_state_param_name,
threshold: String.to_integer(threshold),
}
end
defimpl Json, for: Iodized.Definition.Percentage do
def to_json(percentage) do
%{
operand: "percentage",
param_name: percentage.actual_state_param_name,
value: percentage.threshold,
}
end
end
defimpl Json, for: Atom do
def to_json(true), do: %{operand: "boolean", value: true}
def to_json(false), do: %{operand: "boolean", value: false}
def to_json(nil), do: nil
end
end
|
lib/iodized/definition_json.ex
| 0.688049
| 0.408955
|
definition_json.ex
|
starcoder
|
defmodule GatherSubmissions.Submission.Utils do
@moduledoc """
Contains some utility functions to work with DOMjudge submissions.
"""
alias GatherSubmissions.Submission
alias GatherSubmissions.Submission.File, as: SubFile
alias GatherSubmissions.Student
@doc """
Given a list of submissions, removes those submitted after the given `deadline`.
"""
@spec reject_after_deadline([Submission.t()], NaiveDateTime.t()) :: [Submission.t()]
def reject_after_deadline(submissions, deadline) do
submissions
|> Enum.reject(&(NaiveDateTime.compare(&1.time, deadline) == :gt))
end
@doc """
Given a list of submissions, removes those submitted after the one with the given id.
"""
@spec reject_after_submission([Submission.t()], String.t()) :: [Submission.t()]
def reject_after_submission(submissions, last_submission) do
submissions
|> Enum.reject(&(String.to_integer(&1.id) > String.to_integer(last_submission)))
end
@doc """
Classifies the submissions according to the group to which they belong.
If two students belong to the same group, their submissions will appear in the list corresponding
to that group.
This function returns a map that associates group identifiers with their corresponding lists of
submissions.
"""
@spec classify_by_group([Submission.t()], [Student.t()]) :: %{String.t() => [Submission.t()]}
def classify_by_group(submissions, students) do
user_table = students |> Enum.map(&{&1.user, &1}) |> Enum.into(%{})
groups = get_all_groups(students)
empty_map = groups |> Enum.map(&{&1, []}) |> Enum.into(%{})
groups_map = submissions |> Enum.group_by(fn sub -> user_table[sub.user].group end)
merged = Map.merge(empty_map, groups_map)
:maps.map(
fn _, list ->
Enum.sort_by(list, &String.to_integer(&1.id), :desc)
end,
merged
)
end
@doc """
Returns the first submission on the list for which the given predicate holds.
"""
@spec first_submission([Submission.t()], (Submission.t() -> boolean())) :: Submission.t() | nil
def first_submission(submissions, verdict_fun \\ fn _ -> true end) do
Enum.find(submissions, nil, verdict_fun)
end
@doc """
Downloads and creates the files corresponding to a given submission.
All files will be created in `root_dir/subdir` directory, which will be created if necessary.
Before writing the file, the function `transform_content` will be applied to its contents. This is
useful for preprocessing the file (e.g. stripping away content) before writing it.
It returns the list of the names of the generated files, **relative to `root_dir`**.
"""
@spec create_local_files(Submission.t(), String.t(), String.t(), (String.t() -> String.t())) ::
[String.t()]
def create_local_files(
%Submission{} = submission,
root_dir,
subdir,
transform_content \\ fn content -> content end
) do
subdir_name = Path.join(root_dir, subdir)
File.mkdir_p!(subdir_name)
submission.files.()
|> Enum.map(fn %SubFile{name: name, content: content} ->
local_file_name = Path.join(subdir_name, name)
File.write!(local_file_name, transform_content.(content))
Path.join(subdir, name)
end)
end
defp get_all_groups(students) do
students
|> Enum.map(& &1.group)
|> Enum.uniq()
end
end
|
lib/submissions/utils.ex
| 0.802323
| 0.456531
|
utils.ex
|
starcoder
|
defmodule Elixium.BlockEncoder do
alias Elixium.Block
@moduledoc """
Provides functionality for encoding and decoding blocks
"""
@encoding_order [
:index, :hash, :previous_hash,
:merkle_root, :timestamp, :nonce,
:difficulty, :version, :transactions
]
@doc """
Encode a block to a binary representation. The fields are written in the
order given by `@encoding_order`: index, hash, previous_hash, merkle_root,
timestamp, nonce, difficulty, version, transactions.
"""
@spec encode(Block) :: binary
def encode(block) do
block = Map.delete(block, :__struct__)
Enum.reduce(@encoding_order, <<>>, fn attr, bin -> encode(attr, bin, block[attr]) end)
end
defp encode(:difficulty, bin, value) do
# Convert to binary and strip out ETF bytes (we don't need them for storage,
# we can add them back in when we need to read)
<<131, 70, difficulty::binary>> = :erlang.term_to_binary(value)
bin <> difficulty
end
defp encode(:transactions, bin, value) do
# Add transactions in as raw ETF encoding for easy decoding later
bin <> :erlang.term_to_binary(value)
end
defp encode(:hash, bin, value), do: b16encode(bin, value)
defp encode(:previous_hash, bin, value), do: b16encode(bin, value)
defp encode(:merkle_root, bin, value), do: b16encode(bin, value)
defp encode(_attr, bin, value) when is_binary(value) do
bin <> value
end
defp encode(_attr, bin, value) when is_number(value) do
bin <> :binary.encode_unsigned(value)
end
defp b16encode(bin, value), do: bin <> Base.decode16!(value)
@doc """
Decode a block from binary that was previously encoded by encode/1
"""
@spec decode(binary) :: Block
def decode(block_binary) do
<<index::bytes-size(4),
hash::bytes-size(32),
previous_hash::bytes-size(32),
merkle_root::bytes-size(32),
timestamp::bytes-size(4),
nonce::bytes-size(8),
difficulty::bytes-size(8),
version::bytes-size(2),
transactions::binary
>> = block_binary
%Block{
index: index,
hash: Base.encode16(hash),
previous_hash: Base.encode16(previous_hash),
merkle_root: Base.encode16(merkle_root),
timestamp: :binary.decode_unsigned(timestamp),
nonce: nonce,
difficulty: :erlang.binary_to_term(<<131, 70>> <> difficulty),
version: version,
transactions: :erlang.binary_to_term(transactions)
}
end
end
|
lib/encoding/block_encoder.ex
| 0.813609
| 0.434161
|
block_encoder.ex
|
starcoder
|
defmodule Stein.Storage do
@moduledoc """
`Stein.Storage` covers uploading, downloading, and deleting remote files
## Available backends
### FileBackend
The `Stein.Storage.FileBackend` is available for development purposes.
For the file backend, you can configure the folder Stein should use. This
should be a local folder that Elixir has read/write permissions to. It should
end with a trailing slash.
config :stein_storage,
backend: :file,
file_backend_folder: "uploads/"
The default folder is Stein's `priv/files`.
To fully support the file storage, you should also add a new `Plug.Static`
to your endpoint. This will let the URLs the backend returns load.
if Mix.env() == :dev do
plug(Plug.Static, at: "/uploads", from: "uploads/files")
end
### S3Backend
The `Stein.Storage.S3Backend` handles uploading, downloading, and deletes
from Amazon S3.
For the S3 backend, you can also configure the bucket Stein should upload to.
config :stein_storage,
backend: :s3,
bucket: "my-bucket"
### MockBackend
The `Stein.Storage.MockBackend` mocks out all actions for use in tests. Each
action is a no-op.
config :stein_storage,
backend: :test
"""
alias Stein.Storage.FileBackend
alias Stein.Storage.FileUpload
alias Stein.Storage.MockBackend
alias Stein.Storage.S3Backend
@typedoc """
A processed file ready for uploading
"""
@type file :: FileUpload.t()
@typedoc """
Key for where the document will be stored
Must start with a leading `/`
"""
@type key :: String.t()
@typedoc """
Options for a function
"""
@type opts :: Keyword.t()
@typedoc """
A local file path
"""
@type local_path :: Path.t()
@typedoc """
The URL for viewing the remote file
"""
@type url :: String.t()
@doc """
Delete files from remote storage
"""
@callback delete(key()) :: :ok
@doc """
Download files from remote storage
*Note*: this creates a temporary file and must be cleaned up manually
"""
@callback download(key()) :: {:ok, local_path()}
@doc """
Upload files to the remote storage
"""
@callback upload(file(), key(), opts()) :: :ok | :error
@doc """
Get the remote url for viewing an uploaded file
"""
@callback url(key(), opts()) :: url()
@doc false
def config({:system, name}), do: System.get_env(name)
def config(value), do: value
@doc """
Delete files from remote storage
"""
@spec delete(key()) :: :ok
def delete(key) do
backend().delete(key)
end
@doc """
Download files from remote storage
*Note*: this creates a temporary file and must be cleaned up manually
"""
@spec download(key()) :: {:ok, local_path()}
def download(key) do
backend().download(key)
end
@doc """
Upload files to the remote storage
## Limiting extensions
You can limit extensions with the `extensions` option. Only the extensions in the list
will be allowed, any other extension will be rejected with `{:error, :invalid_extension}`.
Each extension should start with a `.`.
Stein.Storage.upload(file, key, extensions: [".jpg", ".png"])
"""
@spec upload(file(), key(), opts()) :: :ok | {:error, :invalid_extension} | {:error, :uploading}
def upload(file, key, opts) do
file = prep_file(file)
with {:ok, :extension} <- check_extensions(file, opts) do
  backend().upload(file, key, opts)
end
end
@doc false
def check_extensions(file, opts) do
allowed_extensions = Keyword.get(opts, :extensions)
check_allowed_extensions(file, allowed_extensions)
end
defp check_allowed_extensions(_file, nil), do: {:ok, :extension}
defp check_allowed_extensions(file, allowed_extensions) do
extension = String.downcase(file.extension)
case extension in allowed_extensions do
true ->
{:ok, :extension}
false ->
{:error, :invalid_extension}
end
end
@doc """
Get the remote url for viewing an uploaded file
"""
@spec url(key(), opts()) :: url()
def url(key, opts \\ []) do
backend().url(key, opts)
end
@doc """
Prepare a file for upload to the backend
Must be a `Stein.Storage.FileUpload`, `Plug.Upload`, or a map that
has the `:path` key.
"""
@spec prep_file(file()) :: file()
@spec prep_file(Plug.Upload.t()) :: file()
@spec prep_file(%{path: String.t()}) :: file()
def prep_file(upload = %FileUpload{}), do: upload
def prep_file(upload = %Plug.Upload{}) do
%FileUpload{
filename: upload.filename,
extension: Path.extname(upload.filename),
path: upload.path
}
end
def prep_file(upload) when is_map(upload) do
filename = Path.basename(upload.path)
%FileUpload{
filename: filename,
extension: Path.extname(filename),
path: upload.path
}
end
@doc false
def backend() do
case Application.get_env(:stein_storage, :backend) do
:file ->
FileBackend
:s3 ->
S3Backend
:test ->
MockBackend
end
end
end
|
lib/stein/storage.ex
| 0.778355
| 0.487978
|
storage.ex
|
starcoder
|
defmodule Lullabeam.InputDevices.JellyCombKeypad do
@moduledoc """
Interpret keystrokes coming from a Jelly Comb CP001878, described on Amazon
as "USB Numeric Keypad, Jelly Comb N001 Portable Slim Mini Number Pad for
Laptop Desktop Computer PC, Full Size 19 Key, Big Print Letters - Black"
Layout:
┌───┐ ┌───┐ ┌───┐ ┌───┐
│Num│ │ / │ │ * │ │BS │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ 7 │ │ 8 │ │ 9 │ │ - │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ 4 │ │ 5 │ │ 6 │ │ + │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ 1 │ │ 2 │ │ 3 │ │ E │
└───┘ └───┘ └───┘ │ n │
┌───┐ ┌───┐ ┌───┐ │ t │
│ 0 │ │00 │ │ . │ │ r │
└───┘ └───┘ └───┘ └───┘
Mapping:
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ x │ │🌙 │ │🌕 │ │🌞 │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ ⟵ │ │ ⟶ │ │ ? │ │ ? │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│⏪ │ │ ⏯️ │ │ ⏩│ │🛑 │
└───┘ └───┘ └───┘ └───┘
┌───┐ ┌───┐ ┌───┐ ┌───┐
│ ? │ │ ? │ │ ? │ │ │
└───┘ └───┘ └───┘ │ │
┌───┐ ┌───┐ ┌───┐ │ ? │
│ x │ │ x │ │ ? │ │ │
└───┘ └───┘ └───┘ └───┘
🌙 = nap mode
🌕 = bed mode
🌞 = day mode
⟵ = previous folder
⟶ = next folder
⏪ = previous track
⏩ = next track
⏯️ = play / pause
🛑 = stop
Note: the "00" key register as two presses of "0".
Because of this, it's best not to assign anything to "0".
Also, numlock toggles an LED so I'd rather not use it.
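For example, a key-up event for the "/" key maps to the nap-mode command:
    interpret({:ev_key, :key_kpslash, 0})
    #=> {:cmd, {:set_mode, :nap}}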
"""
def device_name do
"HID 04d9:1203"
end
def interpret({:ev_key, :key_kpslash, 0 = _keyup}), do: {:cmd, {:set_mode, :nap}}
def interpret({:ev_key, :key_kpasterisk, 0 = _keyup}), do: {:cmd, {:set_mode, :bed}}
def interpret({:ev_key, :key_backspace, 0 = _keyup}), do: {:cmd, {:set_mode, :wake}}
def interpret({:ev_key, :key_kp7, 0 = _keyup}), do: {:cmd, :prev_folder}
def interpret({:ev_key, :key_kp8, 0 = _keyup}), do: {:cmd, :next_folder}
def interpret({:ev_key, :key_kp4, 0 = _keyup}), do: {:cmd, :prev_track}
def interpret({:ev_key, :key_kp5, 0 = _keyup}), do: {:cmd, :play_or_pause}
def interpret({:ev_key, :key_kp6, 0 = _keyup}), do: {:cmd, :next_track}
def interpret({:ev_key, :key_kpplus, 0 = _keyup}), do: {:cmd, :stop}
# Unassigned:
# [:key_kp0, :key_kp1, :key_kp2, :key_kp3, :key_kp9, :key_kpdot,
# :key_numlock, :key_kpenter, key_kpminus]
def interpret(_e), do: :unknown
end
|
lib/lullabeam/input_devices/jelly_comb_keypad.ex
| 0.538255
| 0.666893
|
jelly_comb_keypad.ex
|
starcoder
|
defmodule Singyeong.Utils do
@moduledoc """
Some utility functions that don't really belong in any one place.
"""
@spec fast_list_concat(list(), list()) :: list()
def fast_list_concat(a, b) do
# See #72 for why this check is needed
cond do
a == nil ->
b
b == nil ->
a
is_list(a) and is_list(b) ->
# See https://github.com/devonestes/fast-elixir/blob/master/code/general/concat_vs_cons.exs
List.flatten [a | b]
is_list(a) and not is_list(b) ->
fast_list_concat a, [b]
not is_list(a) and is_list(b) ->
fast_list_concat [a], b
end
end
# Check out https://stackoverflow.com/a/43881511
def module_loaded?(module), do: function_exported?(module, :__info__, 1)
def ip_to_string(ip) do
case ip do
{a, b, c, d} ->
"#{a}.#{b}.#{c}.#{d}"
{a, b, c, d, e, f, g, h} ->
"#{hex a}:#{hex b}:#{hex c}:#{hex d}:#{hex e}:#{hex f}:#{hex g}:#{hex h}"
end
end
defp hex(v) do
v
|> Integer.to_string(16)
|> String.pad_leading(4, "0")
|> String.downcase
end
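@doc """
Match a concrete route against a template, extracting any `:param` segments
into a map keyed by the parameter name. Examples (illustrative):
    iex> Singyeong.Utils.parse_route "/users/:id", "/users/123"
    {:ok, %{"id" => "123"}}
    iex> Singyeong.Utils.parse_route "/users/:id", "/groups/123"
    :error
"""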
@spec parse_route(binary(), binary()) :: {:ok, map()} | :error
def parse_route(template, actual) do
template_parts = route_to_parts template
actual_parts = route_to_parts actual
zipped = Enum.zip template_parts, actual_parts
length_same? = length(template_parts) == length(actual_parts)
if length_same? and route_matches_template?(zipped) do
params =
zipped
|> Enum.reduce(%{}, fn {template_part, actual_part}, acc ->
if String.starts_with?(template_part, ":") do
":" <> param = template_part
Map.put acc, param, actual_part
else
acc
end
end)
{:ok, params}
else
:error
end
end
defp route_to_parts(route) do
route
|> String.split(~r/\/+/)
|> Enum.filter(fn part -> part != "" end)
end
defp route_matches_template?(zipped_list) do
zipped_list
|> Enum.all?(fn {template_part, actual_part} ->
template_part == actual_part or String.starts_with?(template_part, ":")
end)
end
def stringify_keys(map, recurse? \\ false)
def stringify_keys(map, recurse?) when is_map(map) do
map
|> Enum.map(fn {k, v} ->
if is_binary(k) do
{k, stringify_keys(v)}
else
if recurse? do
{Atom.to_string(k), stringify_keys(v)}
else
{Atom.to_string(k), v}
end
end
end)
|> Enum.into(%{})
end
def stringify_keys(not_map, _), do: not_map
def destructify(map) when is_map(map) do
map
|> Enum.map(fn {k, v} ->
cond do
is_struct(v) ->
{k, destructify(Map.from_struct(v))}
is_map(v) ->
{k, destructify(v)}
is_list(v) ->
{k, Enum.map(v, &destructify/1)}
true ->
{k, v}
end
end)
|> Enum.into(%{})
end
def destructify(not_map), do: not_map
def now, do: :os.system_time :millisecond
def random_string(length) do
length
|> :crypto.strong_rand_bytes
|> Base.url_encode64(padding: false)
end
end
|
lib/singyeong/utils.ex
| 0.763836
| 0.414425
|
utils.ex
|
starcoder
|
defmodule Plug.Crypto do
@moduledoc """
Namespace and module for crypto-related functionality.
Please see `Plug.Crypto.KeyGenerator`, `Plug.Crypto.MessageEncryptor`,
and `Plug.Crypto.MessageVerifier` for more functionality.
"""
use Bitwise
@doc """
Prunes the stacktrace to remove any argument trace.
This is useful when working with functions that receive secrets
and we want to make sure those secrets do not leak on error messages.
"""
@spec prune_args_from_stacktrace(Exception.stacktrace()) :: Exception.stacktrace()
def prune_args_from_stacktrace(stacktrace)
def prune_args_from_stacktrace([{mod, fun, [_ | _] = args, info} | rest]),
do: [{mod, fun, length(args), info} | rest]
def prune_args_from_stacktrace(stacktrace) when is_list(stacktrace),
do: stacktrace
@doc """
A restricted version of `:erlang.binary_to_term/2` that forbids
*executable* terms, such as anonymous functions.
The `opts` are given to the underlying `:erlang.binary_to_term/2`
call, with an empty list as a default.
By default this function does not restrict atoms, as an atom
interned in one node may not yet have been interned on another
(except for releases, which preload all code).
If you want to avoid atoms from being created, then you can pass
`[:safe]` as options, as that will also enable the safety mechanisms
from `:erlang.binary_to_term/2` itself.
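For example (illustrative):
    iex> Plug.Crypto.safe_binary_to_term(:erlang.term_to_binary(%{a: 1}))
    %{a: 1}
Deserializing a binary that contains an anonymous function raises
`ArgumentError` instead.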
"""
@spec safe_binary_to_term(binary(), [atom()]) :: term()
def safe_binary_to_term(binary, opts \\ []) when is_binary(binary) do
term = :erlang.binary_to_term(binary, opts)
safe_terms(term)
term
end
defp safe_terms(list) when is_list(list) do
safe_list(list)
end
defp safe_terms(tuple) when is_tuple(tuple) do
safe_tuple(tuple, tuple_size(tuple))
end
defp safe_terms(map) when is_map(map) do
folder = fn key, value, acc ->
safe_terms(key)
safe_terms(value)
acc
end
:maps.fold(folder, map, map)
end
defp safe_terms(other)
when is_atom(other) or is_number(other) or is_bitstring(other) or is_pid(other) or
is_reference(other) do
other
end
defp safe_terms(other) do
raise ArgumentError,
"cannot deserialize #{inspect(other)}, the term is not safe for deserialization"
end
defp safe_list([]), do: :ok
defp safe_list([h | t]) when is_list(t) do
safe_terms(h)
safe_list(t)
end
defp safe_list([h | t]) do
safe_terms(h)
safe_terms(t)
end
defp safe_tuple(_tuple, 0), do: :ok
defp safe_tuple(tuple, n) do
safe_terms(:erlang.element(n, tuple))
safe_tuple(tuple, n - 1)
end
@doc """
Masks the token on the left with the token on the right.
Both tokens are required to have the same size.
"""
@spec mask(binary(), binary()) :: binary()
def mask(left, right) do
mask(left, right, "")
end
defp mask(<<x, left::binary>>, <<y, right::binary>>, acc) do
mask(left, right, <<acc::binary, x ^^^ y>>)
end
defp mask(<<>>, <<>>, acc) do
acc
end
@doc """
Compares the two binaries (one being masked) in constant-time to avoid
timing attacks.
It is assumed the right token is masked according to the given mask.
"""
@spec masked_compare(binary(), binary(), binary()) :: boolean()
def masked_compare(left, right, mask)
when is_binary(left) and is_binary(right) and is_binary(mask) do
byte_size(left) == byte_size(right) and masked_compare(left, right, mask, 0)
end
defp masked_compare(<<x, left::binary>>, <<y, right::binary>>, <<z, mask::binary>>, acc) do
xorred = x ^^^ (y ^^^ z)
masked_compare(left, right, mask, acc ||| xorred)
end
defp masked_compare(<<>>, <<>>, <<>>, acc) do
acc === 0
end
@doc """
Compares the two binaries in constant-time to avoid timing attacks.
See: http://codahale.com/a-lesson-in-timing-attacks/
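For example (illustrative):
    iex> Plug.Crypto.secure_compare("token", "token")
    true
    iex> Plug.Crypto.secure_compare("token", "other")
    false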
"""
@spec secure_compare(binary(), binary()) :: boolean()
def secure_compare(left, right) when is_binary(left) and is_binary(right) do
byte_size(left) == byte_size(right) and secure_compare(left, right, 0)
end
defp secure_compare(<<x, left::binary>>, <<y, right::binary>>, acc) do
xorred = x ^^^ y
secure_compare(left, right, acc ||| xorred)
end
defp secure_compare(<<>>, <<>>, acc) do
acc === 0
end
end
|
lib/plug/crypto.ex
| 0.818592
| 0.612339
|
crypto.ex
|
starcoder
|
defmodule CircuitsLED do
@moduledoc """
Control LEDs
This module provides a consistent way of controlling LEDs on devices
running Nerves or Linux.
Use cases I'm thinking about:
1. Control `/sys/class` LEDs and GPIO LEDs using functions to make
things easier on users who don't know about Linux's LED subsystem.
Also, since it's really hard for most Nerves users (and me too) to
add an LED to Linux's LED subsystem, I like the idea of controlling
LEDs via GPIOs without going straight to `circuits_gpio`.
2. Blink LEDs
3. Combine one or more LEDs and be able to set them with a color. For
example, it's common to have a red, green, and blue LED in one device.
It would be nice to set them with one API call rather than make
a three separate calls
4. Support Linux's LED subsystems triggers so that LEDs can be
used for CPU, disk, and network activity.
Questions:
1. Expose a GenServer or not? Decided yes.
2. Require users to "open" LEDs before use? I.e., is the user responsible for
resource management? Decided yes, but since GenServer, they'll be
start_linking.
"""
@typedoc """
Named LEDs are LEDs that have been given names in Linux
For example, the Beaglebone Black boards have the following named LEDs:
* "beaglebone:green:usr0"
* "beaglebone:green:usr1"
* "beaglebone:green:usr2"
* "beaglebone:green:usr3"
"""
@type named_led :: String.t()
@typedoc """
A GPIO LED is an LED that's connected via a GPIO pin
"""
@type gpio_led :: {:gpio, non_neg_integer()}
@typedoc """
An LED
"""
@type led_spec :: named_led() | gpio_led()
@typedoc """
Maybe a reference to the LED from when it has been opened or
maybe the led_spec()?
"""
@type led_ref :: term()
@typedoc """
TBD LED color
Nearly all LEDs controlled by this API can only be turned on and off.
Sometimes there are bicolor and RGB LEDs and sometimes it's possible
to set the LED brightness.
"""
@type color :: term()
@doc """
Return a list of LEDs that have names
"""
@spec named_leds() :: [named_led()]
def named_leds() do
case File.ls("/sys/class/leds") do
{:ok, leds} -> leds
_ -> []
end
end
@doc """
TODO: Refactor this to make each LED be a proper GenServer so that it can be supervised,
serialize multi-operation requests to Linux, and do things like blink.
"""
@spec start_link(led_spec(), keyword()) :: GenServer.on_start()
def start_link(led, opts \\ []) do
GenServer.start_link(__MODULE__, led, opts)
end
@doc """
Turn the specified LED off
"""
@spec off(led_ref()) :: :ok
def off(_led) do
:ok
end
@doc """
Turn the specified LED on
"""
@spec on(led_ref(), color()) :: :ok
def on(_led, _color) do
:ok
end
@doc """
Turn the LED on and off repeatedly
The LED first turns on for `on_time` milliseconds, then turns off for
`off_time` milliseconds, and repeats. It repeats `n` times; `n = 0`
means that it repeats indefinitely.
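Illustrative call (blink three times, 500 ms on / 250 ms off):
    CircuitsLED.blink(led, 500, 250, 3)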
"""
@spec blink(led_ref(), non_neg_integer(), non_neg_integer(), non_neg_integer()) :: :ok
def blink(_led, _on_time, _off_time, _n \\ 0) do
:ok
end
@doc """
Toggle the state of the LED
"""
@spec toggle(led_ref()) :: :ok
def toggle(_led) do
:ok
end
@doc """
Return true if the LED is on
"""
@spec is_lit(led_ref()) :: false
def is_lit(_led) do
false
end
@doc """
Return a list of triggers supported by the LED
LEDs provided by Linux's `/sys/class` interface can be triggered
by system events to show off things like CPU, disk and network
usage. This function returns supported triggers.
"""
def triggers(_led) do
[]
end
@doc """
Support setting triggers on LEDs
Call `triggers/1` to get a list of what triggers are available for the
LED.
Put the LED into heartbeat mode:
iex> CircuitsLED.set_trigger(led, "heartbeat")
:ok
"""
def set_trigger(_led, _trigger, _options \\ []) do
:ok
end
end
|
lib/circuits_led.ex
| 0.743634
| 0.457076
|
circuits_led.ex
|
starcoder
|
defmodule RDF.Literal.Generic do
@moduledoc """
A generic `RDF.Literal.Datatype` for literals of an unknown datatype.
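Example (illustrative):
    iex> literal = RDF.Literal.Generic.new("foo", datatype: "http://example.com/dt")
    iex> RDF.Literal.Generic.value(literal)
    "foo"
    iex> RDF.Literal.Generic.valid?(literal)
    true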
"""
defstruct [:value, :datatype]
use RDF.Literal.Datatype,
name: "generic",
id: nil
alias RDF.Literal.Datatype
alias RDF.{Literal, IRI}
import RDF.Guards
@type t :: %__MODULE__{
value: String.t(),
datatype: String.t()
}
@impl Datatype
@spec new(any, String.t() | IRI.t() | keyword) :: Literal.t()
def new(value, datatype_or_opts \\ [])
def new(value, %IRI{} = datatype), do: new(value, datatype: datatype)
def new(value, datatype) when is_binary(datatype) or maybe_ns_term(datatype),
do: new(value, datatype: datatype)
def new(value, opts) do
%Literal{
literal: %__MODULE__{
value: value,
datatype: Keyword.get(opts, :datatype) |> normalize_datatype()
}
}
end
defp normalize_datatype(nil), do: nil
defp normalize_datatype(""), do: nil
defp normalize_datatype(%IRI{} = datatype), do: to_string(datatype)
defp normalize_datatype(datatype) when maybe_ns_term(datatype),
do: datatype |> RDF.iri() |> to_string()
defp normalize_datatype(datatype), do: datatype
@impl Datatype
@spec new!(any, String.t() | IRI.t() | keyword) :: Literal.t()
def new!(value, datatype_or_opts \\ []) do
literal = new(value, datatype_or_opts)
if valid?(literal) do
literal
else
raise ArgumentError,
"#{inspect(value)} with datatype #{inspect(literal.literal.datatype)} is not a valid #{inspect(__MODULE__)}"
end
end
@impl Datatype
def datatype_id(%Literal{literal: literal}), do: datatype_id(literal)
def datatype_id(%__MODULE__{} = literal), do: RDF.iri(literal.datatype)
@impl Datatype
def value(%Literal{literal: literal}), do: value(literal)
def value(%__MODULE__{} = literal), do: literal.value
@impl Datatype
def lexical(%Literal{literal: literal}), do: lexical(literal)
def lexical(%__MODULE__{} = literal), do: literal.value
@impl Datatype
def canonical(%Literal{literal: %__MODULE__{}} = literal), do: literal
def canonical(%__MODULE__{} = literal), do: literal(literal)
@impl Datatype
def canonical?(%Literal{literal: literal}), do: canonical?(literal)
def canonical?(%__MODULE__{}), do: true
@impl Datatype
def valid?(%Literal{literal: %__MODULE__{} = literal}), do: valid?(literal)
def valid?(%__MODULE__{datatype: datatype}) when is_binary(datatype), do: true
def valid?(_), do: false
@doc """
Since generic literals don't support casting, always returns `nil`.
"""
def cast(_), do: nil
@impl Datatype
def do_cast(_), do: nil
@impl Datatype
def do_equal_value_same_or_derived_datatypes?(
%{datatype: datatype} = left,
%{datatype: datatype} = right
),
do: left == right
def do_equal_value_same_or_derived_datatypes?(_, _), do: nil
@impl Datatype
def do_compare(
%__MODULE__{datatype: datatype} = left_literal,
%__MODULE__{datatype: datatype} = right_literal
) do
case {left_literal.value, right_literal.value} do
{left_value, right_value} when left_value < right_value ->
:lt
{left_value, right_value} when left_value > right_value ->
:gt
_ ->
if equal_value?(left_literal, right_literal), do: :eq
end
end
def do_compare(_, _), do: nil
@impl Datatype
def update(literal, fun, opts \\ [])
def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)
def update(%__MODULE__{} = literal, fun, _opts) do
literal
|> value()
|> fun.()
|> new(datatype: literal.datatype)
end
end
|
lib/rdf/literal/datatypes/generic.ex
| 0.902863
| 0.647791
|
generic.ex
|
starcoder
|
defmodule Elastic.Scroll do
@moduledoc ~S"""
Provides Elixir functions for [ElasticSearch's scroll endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-scroll.html#search-request-scroll).
You should probably be using `Elastic.Scroller` instead.
"""
alias Elastic.HTTP
alias Elastic.Index
@scroll_endpoint "_search/scroll"
@doc ~S"""
Starts a new scroll using [ElasticSearch's scroll endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-scroll.html#search-request-scroll).
```elixir
Elastic.Scroll.start(%{
index: "answer",
body: %{} # a query can go here
size: 100,
keepalive: "1m"
})
```
"""
@spec start(%{
required(:index) => String.t,
required(:body) => map(),
required(:size) => pos_integer(),
required(:keepalive) => String.t
}) :: {:ok, 200, map()} | {:error, 404, map()}
def start(%{index: index, body: body, size: size, keepalive: keepalive}) do
body = body |> Map.merge(%{size: size})
HTTP.get("#{Index.name(index)}/_search?scroll=#{keepalive}", body: body)
end
@doc ~S"""
Fetches the next batch of results from a specified scroll.
```elixir
Elastic.Scroll.next(%{
scroll_id: "<a base64 scroll ID goes here>"
keepalive: "1m"
})
```
"""
@spec next(%{
required(:scroll_id) => String.t,
required(:keepalive) => String.t
}) :: {:ok, 200, map()} | {:error, 404, map()} | {:error, pos_integer(), map()}
def next(%{scroll_id: scroll_id, keepalive: keepalive}) do
HTTP.get(@scroll_endpoint, body: %{scroll_id: scroll_id, scroll: keepalive})
end
@doc ~S"""
Clears the specified scroll by calling [this endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-scroll.html#_clear_scroll_api)
```elixir
Elastic.Scroll.clear("<Base64 Scroll ID goes here>")
```
Can also be used to clear multiple scrolls at the same time:
```elixir
Elastic.Scroll.clear([
"<Base64 Scroll ID goes here>",
"<Base64 Scroll ID goes here>"
])
```
"""
@spec clear(String.t | [String.t, ...])
:: {:ok, 200, map()} | {:error, 404, map()} | {:error, pos_integer(), map()}
def clear(scroll_id) do
HTTP.delete(@scroll_endpoint, body: %{scroll_id: scroll_id})
end
end
|
lib/elastic/scroll.ex
| 0.921592
| 0.659838
|
scroll.ex
|
starcoder
|
defmodule Tablespoon.Communicator.Modem do
@moduledoc """
Communication with a modem at an intersection.
The communication is line-based.
When we first connect, we expect an "OK" line, unless the "expect_ok?: false" option is passed.
To request priority at an intersection, we set one of the relays to 1:
> "AT*RELAYOUT2=1" -> "OK"
2 is North, 3 East, 4 South, and 5 West.
To cancel a priority request, we set the relay to 0:
> "AT*RELAYOUT2=0" -> "OK"
If we receive multiple requests for priority in a given direction, we don't
send the cancel message until the last vehicle requests a cancellation.
> "AT*RELAYOUT3=1" -> "OK"
> "AT*RELAYOUT3=1" -> "OK"
> "AT*RELAYOUT3=0" (skipped, not sent)
> "AT*RELAYOUT3=0" -> "OK"
However, if we don't receive a cancel from the vehicle after some time
(`@open_request_timeout`) we will send a cancellation in the background and
log a warning.
"""
require Logger
@behaviour Tablespoon.Communicator
@enforce_keys [:transport]
defstruct @enforce_keys ++
[
buffer: "",
queue: :queue.new(),
open_vehicles: %{},
approach_counts: %{:north => 0, :east => 0, :south => 0, :west => 0},
expect_ok?: true,
connection_state: :not_connected,
id_ref: nil,
keep_alive_ref: nil
]
# how often we send a newline to keep the connection open
@keep_alive_timeout 180_000
# how long an in-flight request can not have a response before we consider it stale and re-connect
@stale_query_timeout 30_000
# how long a request can live without a cancel before we send a cancel ourselves
@open_request_timeout 300_000
alias Tablespoon.{Protocol.Line, Query, Transport}
@impl Tablespoon.Communicator
def new(transport, opts \\ []) do
expect_ok? = Keyword.get(opts, :expect_ok?, true)
%__MODULE__{transport: transport, expect_ok?: expect_ok?}
end
@impl Tablespoon.Communicator
def connect(%__MODULE__{} = comm) do
{:ok, comm, events} = do_close(comm, :reconnect, [])
with {:ok, transport} <- Transport.connect(comm.transport) do
id_ref = make_ref()
connection_state =
if comm.expect_ok? do
:awaiting_ok
else
Kernel.send(self(), {id_ref, :timeout})
:connected
end
comm = %{
comm
| id_ref: id_ref,
transport: transport,
expect_ok?: comm.expect_ok?,
connection_state: connection_state
}
{:ok, comm, events}
end
end
@impl Tablespoon.Communicator
def close(%__MODULE__{} = comm) do
comm =
comm.queue
|> :queue.to_list()
|> Enum.reduce(comm, fn q, comm ->
with %{type: :request} <- q,
q = %{q | type: :cancel},
{:ok, transport} <- send_query(comm, q) do
%{comm | transport: transport}
else
_ -> comm
end
end)
do_close(comm, :close, [])
end
@impl Tablespoon.Communicator
def send(%__MODULE__{} = comm, %Query{} = q) do
with {:ok, comm} <- check_stale_queries(comm) do
comm = track_open_vehicles(comm, q)
approach_counts = update_approach_counts(comm, q)
if q.type == :request or Map.fetch!(approach_counts, q.approach) == 0 do
case send_query(comm, q) do
{:ok, transport} ->
queue = :queue.in(q, comm.queue)
comm = %{comm | transport: transport, queue: queue, approach_counts: approach_counts}
{:ok, comm, []}
{:error, e} ->
{:ok, comm, [{:failed, q, e}]}
end
else
# ignoring an extra cancel
{:ok, %{comm | approach_counts: approach_counts}, [sent: q]}
end
end
end
defp track_open_vehicles(comm, q) do
open_vehicles =
case q.type do
:request ->
ref =
Process.send_after(self(), {comm.id_ref, :query_timeout, q}, @open_request_timeout)
track_open_vehicles_request(comm, q.vehicle_id, ref)
:cancel ->
track_open_vehicles_cancel(comm, q.vehicle_id)
end
%{comm | open_vehicles: open_vehicles}
end
defp track_open_vehicles_request(comm, vehicle_id, ref) do
open_vehicles = Map.put_new_lazy(comm.open_vehicles, vehicle_id, &:queue.new/0)
Map.update!(open_vehicles, vehicle_id, &:queue.in(ref, &1))
end
defp track_open_vehicles_cancel(comm, vehicle_id) do
with {:ok, queue} <- Map.fetch(comm.open_vehicles, vehicle_id),
{{:value, ref}, queue} <- :queue.out(queue) do
_ = Process.cancel_timer(ref)
if :queue.is_empty(queue) do
Map.delete(comm.open_vehicles, vehicle_id)
else
Map.put(comm.open_vehicles, vehicle_id, queue)
end
else
:error ->
# couldn't find the vehicle in the map
comm.open_vehicles
{:empty, _queue} ->
# I don't believe this case can happen, as we delete the entry if the
# queue is empty above. But we handle this case anyways, in the same
# way. -ps
# coveralls-ignore-start
Map.delete(comm.open_vehicles, vehicle_id)
# coveralls-ignore-stop
end
end
defp update_approach_counts(comm, q) do
count_change =
if q.type == :request do
&(&1 + 1)
else
# ensure we never go below 0
&max(&1 - 1, 0)
end
Map.update!(comm.approach_counts, q.approach, count_change)
end
def send_query(comm, q) do
data =
q
|> query_iodata()
|> Line.encode()
Transport.send(comm.transport, data)
end
@impl Tablespoon.Communicator
def stream(comm, message)
def stream(%__MODULE__{id_ref: id_ref} = comm, {id_ref, :timeout}) do
with {:ok, comm} <- check_stale_queries(comm) do
_ = if comm.keep_alive_ref, do: Process.cancel_timer(comm.keep_alive_ref)
case Transport.send(comm.transport, "\n") do
{:ok, transport} ->
ref = Process.send_after(self(), {id_ref, :timeout}, @keep_alive_timeout)
{:ok, %{comm | keep_alive_ref: ref, transport: transport}, []}
{:error, e} ->
{:ok, comm, [{:error, e}]}
end
end
end
def stream(%__MODULE__{id_ref: id_ref} = comm, {id_ref, :query_timeout, query}) do
vehicle_id = query.vehicle_id
case Map.fetch(comm.open_vehicles, vehicle_id) do
{:ok, queue} ->
open_vehicles = track_open_vehicles_cancel(comm, vehicle_id)
comm = %{comm | open_vehicles: open_vehicles}
if :queue.is_empty(queue) do
# this case shouldn't be possible (we delete empty queues) but we handle it anyways -ps
# coveralls-ignore-start
{:ok, comm, []}
# coveralls-ignore-stop
else
pretend_cancel(comm, query)
end
:error ->
# no open requests for this vehicle, nothing to do!
{:ok, comm, []}
end
end
def stream(%__MODULE__{} = comm, message) do
with {:ok, transport, results} <- Transport.stream(comm.transport, message),
{:ok, comm} <- check_stale_queries(comm) do
comm = %{comm | transport: transport}
Enum.reduce_while(results, {:ok, comm, []}, &handle_stream_results/2)
end
end
defp pretend_cancel(comm, q) do
cancel_query = Query.update(q, type: :cancel)
approach_counts = update_approach_counts(comm, cancel_query)
original_event_time_iso =
q.event_time
|> DateTime.from_unix!(:native)
|> DateTime.truncate(:second)
|> DateTime.to_iso8601()
event_time_iso = DateTime.to_iso8601(DateTime.utc_now())
case send_query(comm, cancel_query) do
{:ok, transport} ->
# we put the connection into the :awaiting_ok state to eat the
# response to our fake cancel message.
Logger.info(
"sending fake cancel alias=#{q.intersection_alias} pid=#{inspect(self())} type=:cancel q_id=#{q.id} v_id=#{q.vehicle_id} approach=#{q.approach} event_time=#{event_time_iso} original_event_time=#{original_event_time_iso}"
)
comm = %{
comm
| transport: transport,
approach_counts: approach_counts,
connection_state: :awaiting_ok
}
{:ok, comm, []}
{:error, e} ->
do_close(comm, e, [], [{:error, e}])
end
end
defp handle_stream_results({:data, binary}, {:ok, comm, events}) do
comm = %{comm | buffer: comm.buffer <> binary}
handle_buffer(comm, events)
end
defp handle_stream_results(:closed, {:ok, comm, events}) do
{:halt, do_close(comm, :closed, events, [{:error, :closed}])}
end
defp handle_buffer(comm, events) do
case Line.decode(comm.buffer) do
{:ok, line, rest} ->
comm = %{comm | buffer: rest}
{:ok, comm, new_events} = handle_line(comm, line)
handle_buffer(comm, events ++ new_events)
{:error, :too_short} ->
{:cont, {:ok, comm, events}}
end
end
defp handle_line(comm, "") do
{:ok, comm, []}
end
defp handle_line(%{connection_state: :connected} = comm, "OK") do
case :queue.out(comm.queue) do
{{:value, q}, queue} ->
comm = %{comm | queue: queue}
{:ok, comm, [sent: q]}
{:empty, queue} ->
comm = %{comm | queue: queue}
_ = Logger.info("#{__MODULE__} unexpected OK response, ignoring...")
{:ok, comm, []}
end
end
defp handle_line(comm, "AT*RELAYOUT" <> _) do
# echo of our request. sometimes the modems don't send the initial OK
# first, so we ignore the echo either way.
{:ok, comm, []}
end
defp handle_line(%{connection_state: :connected} = comm, line) do
error =
if line == "ERROR" do
:error
else
{:unknown, line}
end
{response, queue} = :queue.out(comm.queue)
comm = %{comm | queue: queue}
results =
case response do
{:value, q} ->
[{:failed, q, error}]
:empty ->
_ =
Logger.warn(
"#{__MODULE__} unexpected response with empty queue comm=#{inspect(comm)} line=#{inspect(line)}"
)
[]
end
{:ok, comm, results}
end
defp handle_line(%{connection_state: :awaiting_ok} = comm, "OK") do
# we get an OK when we first connect
comm = %{comm | connection_state: :connected}
Kernel.send(self(), {comm.id_ref, :timeout})
{:ok, comm, []}
end
defp handle_line(%{connection_state: :awaiting_ok} = comm, "picocom" <> _) do
# picocom modems send a bunch of user-facing content when
# connecting. it's over when we get a "Terminal ready" line.
comm = %{comm | connection_state: :picocom_initial}
{:ok, comm, []}
end
defp handle_line(%{connection_state: :picocom_initial} = comm, "Terminal ready") do
Kernel.send(self(), {comm.id_ref, :timeout})
comm = %{comm | connection_state: :connected}
{:ok, comm, []}
end
defp handle_line(%{connection_state: :picocom_initial} = comm, _) do
{:ok, comm, []}
end
defp query_iodata(%Query{} = q) do
["AT*RELAYOUT", request_relay(q), ?=, request_value(q)]
end
defp request_relay(%{approach: :north}), do: ?2
defp request_relay(%{approach: :east}), do: ?3
defp request_relay(%{approach: :south}), do: ?4
defp request_relay(%{approach: :west}), do: ?5
defp request_value(%{type: :request}), do: ?1
defp request_value(%{type: :cancel}), do: ?0
defp check_stale_queries(comm) do
stale_responses =
:queue.filter(
fn q -> Query.processing_time(q, :millisecond) > @stale_query_timeout end,
comm.queue
)
if :queue.is_empty(stale_responses) do
{:ok, comm}
else
do_close(comm, :stale, [], [{:error, :stale}])
end
end
defp do_close(comm, reason, events, tail_events \\ []) do
failures =
for q <- :queue.to_list(comm.queue) do
{:failed, q, reason}
end
# cancel keep-alive timer
_ = if comm.keep_alive_ref, do: Process.cancel_timer(comm.keep_alive_ref)
# cancel any open vehicle timers
_ =
for q <- Map.values(comm.open_vehicles),
ref <- :queue.to_list(q) do
_ = Process.cancel_timer(ref)
end
transport = Transport.close(comm.transport)
comm = %__MODULE__{transport: transport, expect_ok?: comm.expect_ok?}
{:ok, comm, events ++ failures ++ tail_events}
end
end
|
lib/tablespoon/communicator/modem.ex
| 0.812086
| 0.487856
|
modem.ex
|
starcoder
|
defmodule Waffle.Storage.Google.CloudStorage do
@moduledoc """
The main storage integration for Waffle, this acts primarily as a wrapper
around `Google.Api.Storage.V1`. To use this module with Waffle, simply set
your `:storage` config appropriately:
```elixir
config :waffle, storage: Waffle.Storage.Google.CloudStorage
```
Ensure you have a valid bucket set, either through the configs or as an
environment variable, otherwise all calls will fail. The credentials available
through `Goth` must have the appropriate level of access to the bucket,
otherwise some (or all) calls may fail.
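Once configured, uploads usually flow through a `Waffle.Definition` module of
your own rather than this module directly, e.g. (illustrative, with `Avatar`
being such a definition):
```elixir
Avatar.store({"/path/to/file.png", user})
```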
"""
@full_control_scope "https://www.googleapis.com/auth/devstorage.full_control"
alias GoogleApi.Storage.V1.Connection
alias GoogleApi.Storage.V1.Api.Objects
alias GoogleApi.Storage.V1.Model.Object
alias Waffle.Storage.Google.Util
alias Waffle.Types
@type object_or_error :: {:ok, GoogleApi.Storage.V1.Model.Object.t} | {:error, Tesla.Env.t}
@doc """
Put a Waffle file in a Google Cloud Storage bucket.
"""
@spec put(Types.definition, Types.version, Types.meta) :: object_or_error
def put(definition, version, meta) do
path = path_for(definition, version, meta)
acl = definition.acl(version, meta)
gcs_options =
definition
|> get_gcs_options(version, meta)
|> ensure_keyword_list()
|> Keyword.put(:acl, acl)
|> Enum.into(%{})
insert(conn(), bucket(definition), path, data(meta), gcs_options)
end
@doc """
Delete a file from a Google Cloud Storage bucket.
"""
@spec delete(Types.definition, Types.version, Types.meta) :: object_or_error
def delete(definition, version, meta) do
Objects.storage_objects_delete(
conn(),
bucket(definition),
path_for(definition, version, meta) |> URI.encode_www_form()
)
end
@doc """
Retrieve the public URL for a file in a Google Cloud Storage bucket. Uses
`Waffle.Storage.Google.UrlV2` by default, which uses v2 signing if a signed
URL is requested, but this can be overridden in the options list or in the
application configs by setting `:url_builder` to any module that implements the
behavior of `Waffle.Storage.Google.Url`.
"""
@spec url(Types.definition, Types.version, Types.meta, Keyword.t) :: String.t
def url(definition, version, meta, opts \\ []) do
signer = Util.option(opts, :url_builder, Waffle.Storage.Google.UrlV2)
signer.build(definition, version, meta, opts)
end
@doc """
Constructs a new connection object with scoped authentication. If no scope is
provided, the `devstorage.full_control` scope is used as a default.
"""
@spec conn(String.t) :: Tesla.Env.client
def conn(scope \\ @full_control_scope) do
token_store = Application.get_env(:waffle, :token_fetcher, Waffle.Storage.Google.Token.DefaultFetcher)
token_store.get_token(scope)
|> Connection.new()
end
@doc """
Returns the bucket for file uploads.
"""
@spec bucket(Types.definition) :: String.t
def bucket(definition), do: Util.var(definition.bucket())
@doc """
Returns the storage directory **within a bucket** to store the file under.
"""
@spec storage_dir(Types.definition, Types.version, Types.meta) :: String.t
def storage_dir(definition, version, meta) do
version
|> definition.storage_dir(meta)
|> Util.var()
end
@doc """
Returns the full file path for the upload destination.
"""
@spec path_for(Types.definition, Types.version, Types.meta) :: String.t
def path_for(definition, version, meta) do
definition
|> storage_dir(version, meta)
|> Path.join(fullname(definition, version, meta))
end
@doc """
A wrapper for `Waffle.Definition.Versioning.resolve_file_name/3`.
"""
@spec fullname(Types.definition, Types.version, Types.meta) :: String.t
def fullname(definition, version, meta) do
Waffle.Definition.Versioning.resolve_file_name(definition, version, meta)
end
@spec data(Types.file) :: {:file | :binary, String.t}
defp data({%{binary: nil, path: path}, _}), do: {:file, path}
defp data({%{binary: data}, _}), do: {:binary, data}
@spec insert(Tesla.Env.client, String.t, String.t, {:file | :binary, String.t}, map()) :: object_or_error
defp insert(conn, bucket, name, {:file, path}, gcs_options) do
object = %Object{name: name}
|> Map.merge(gcs_options)
Objects.storage_objects_insert_simple(
conn,
bucket,
"multipart",
object,
path
)
end
defp insert(conn, bucket, name, {:binary, data}, _gcs_options) do
Util.storage_objects_insert(
conn,
bucket,
[
body: data,
name: name,
]
)
end
defp get_gcs_options(definition, version, {file, scope}) do
try do
apply(definition, :gcs_object_headers, [version, {file, scope}])
rescue
UndefinedFunctionError ->
[]
end
end
defp ensure_keyword_list(list) when is_list(list), do: list
defp ensure_keyword_list(map) when is_map(map), do: Map.to_list(map)
end
|
lib/waffle/storage/google/cloud_storage.ex
| 0.846704
| 0.71889
|
cloud_storage.ex
|
starcoder
|
defmodule EctoEnum do
@moduledoc """
Provides `defenum/2` macro for defining an Enum Ecto type.
"""
@doc """
Defines an enum custom `Ecto.Type`.
It can be used like any other `Ecto.Type` by passing it to a field in your model's
schema block. For example:
import EctoEnum
defenum StatusEnum, registered: 0, active: 1, inactive: 2, archived: 3
defmodule User do
use Ecto.Schema
schema "users" do
field :status, StatusEnum
end
end
In the above example, the `:status` will behave like an enum and will allow you to
pass an `integer`, `atom` or `string` to it. This applies to saving the model,
invoking `Ecto.Changeset.cast/4`, or performing a query on the status field. Let's
do a few examples:
iex> user = Repo.insert!(%User{status: 0})
iex> Repo.get(User, user.id).status
:registered
iex> %{changes: changes} = cast(%User{}, %{"status" => "active"}, ~w(status), [])
iex> changes.status
:active
iex> from(u in User, where: u.status == :registered) |> Repo.all() |> length
1
Passing an invalid value to a `Ecto.Changeset.cast/3` will add an error to `changeset.errors`
field.
iex> changeset = cast(%User{}, %{"status" => "retroactive"}, ~w(status), [])
iex> changeset.errors
[status: "is invalid"]
Passing an invalid value directly into a model struct will result in an error when calling
`Repo` functions.
iex> Repo.insert!(%User{status: :none})
** (Ecto.ChangeError) `"none"` is not a valid enum value for `EctoEnumTest.StatusEnum`.
Valid enum values are `[0, 1, 2, 3, :registered, :active, :inactive, :archived, "active",
"archived", "inactive", "registered"]`
The enum type `StatusEnum` will also have a reflection function for inspecting the
enum map in runtime.
iex> StatusEnum.__enum_map__()
[registered: 0, active: 1, inactive: 2, archived: 3]
"""
defmacro defenum(module, type, enum, options \\ []) do
EctoEnum.Postgres.defenum(module, type, enum, options)
end
defmacro defenum(module, enum) do
quote do
kw = unquote(enum) |> Macro.escape()
defmodule unquote(module) do
@behaviour Ecto.Type
@atom_int_kw kw
@int_atom_map for {atom, int} <- kw, into: %{}, do: {int, atom}
@string_int_map for {atom, int} <- kw, into: %{}, do: {Atom.to_string(atom), int}
@string_atom_map for {atom, int} <- kw, into: %{}, do: {Atom.to_string(atom), atom}
@valid_values Keyword.values(@atom_int_kw) ++
Keyword.keys(@atom_int_kw) ++ Map.keys(@string_int_map)
def type, do: :integer
def cast(term) do
EctoEnum.Type.cast(term, @int_atom_map, @string_atom_map)
end
def load(int) when is_integer(int) do
Map.fetch(@int_atom_map, int)
end
def dump(term) do
case EctoEnum.Type.dump(term, @atom_int_kw, @string_int_map, @int_atom_map) do
:error ->
msg =
"Value `#{inspect(term)}` is not a valid enum for `#{inspect(__MODULE__)}`. " <>
"Valid enums are `#{inspect(__valid_values__())}`"
raise Ecto.ChangeError,
message: msg
value ->
value
end
end
def valid_value?(value) do
Enum.member?(@valid_values, value)
end
# Reflection
def __enum_map__(), do: @atom_int_kw
def __valid_values__(), do: @valid_values
end
end
end
defmodule Type do
@spec cast(any, map, map) :: {:ok, atom} | :error
def cast(atom, int_atom_map, _) when is_atom(atom) do
if atom in Map.values(int_atom_map) do
{:ok, atom}
else
:error
end
end
def cast(string, _, string_atom_map) when is_binary(string) do
Map.fetch(string_atom_map, string)
end
def cast(int, int_atom_map, _) when is_integer(int) do
Map.fetch(int_atom_map, int)
end
def cast(_, _, _), do: :error
@spec dump(any, [{atom(), any()}], map, map) :: {:ok, integer} | :error
def dump(integer, _, _, int_atom_map) when is_integer(integer) do
if int_atom_map[integer] do
{:ok, integer}
else
:error
end
end
def dump(atom, atom_int_kw, _, _) when is_atom(atom) do
Keyword.fetch(atom_int_kw, atom)
end
def dump(string, _, string_int_map, _) when is_binary(string) do
Map.fetch(string_int_map, string)
end
def dump(_, _, _, _), do: :error
end
alias Ecto.Changeset
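@doc """
Validates that the value of `field` in the changeset is a valid enum value,
adding an error to the changeset otherwise. Illustrative example, assuming
the `StatusEnum` field from the moduledoc:
    %User{}
    |> Ecto.Changeset.cast(params, [:status])
    |> EctoEnum.validate_enum(:status)
"""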
@spec validate_enum(
Ecto.Changeset.t(),
atom,
(atom, String.t(), list(String.t() | integer | atom) -> String.t())
) :: Ecto.Changeset.t()
def validate_enum(changeset, field, error_msg \\ &default_error_msg/3) do
Changeset.validate_change(changeset, field, :validate_enum, fn field, value ->
type = changeset.types[field]
error_msg = error_msg.(field, value, type.__valid_values__())
if type.valid_value?(value) do
[]
else
Keyword.put([], field, error_msg)
end
end)
end
defp default_error_msg(field, value, valid_values) do
"Value `#{inspect(value)}` is not a valid enum for `#{inspect(field)}` field. " <>
"Valid enums are `#{inspect(valid_values)}`"
end
end
|
lib/ecto_enum.ex
| 0.81841
| 0.475423
|
ecto_enum.ex
|
starcoder
|
import Kernel, except: [inspect: 1]
import Inspect.Algebra
alias Code.Identifier
defprotocol Inspect do
@moduledoc """
The `Inspect` protocol is responsible for converting any Elixir
data structure into an algebra document. This document is then
formatted, either in pretty printing format or a regular one.
The `inspect/2` function receives the entity to be inspected
followed by the inspecting options, represented by the struct
`Inspect.Opts`.
Inspection is done using the functions available in `Inspect.Algebra`.
## Examples
Many times, inspecting a structure can be implemented in terms
of existing entities. For example, here is `MapSet`'s `inspect`
implementation:
defimpl Inspect, for: MapSet do
import Inspect.Algebra
def inspect(dict, opts) do
concat(["#MapSet<", to_doc(MapSet.to_list(dict), opts), ">"])
end
end
The `concat/1` function comes from `Inspect.Algebra` and it
concatenates algebra documents together. In the example above,
it is concatenating the string `"MapSet<"` (all strings are
valid algebra documents that keep their formatting when pretty
printed), the document returned by `Inspect.Algebra.to_doc/2` and the
other string `">"`.
Since regular strings are valid entities in an algebra document,
an implementation of inspect may simply return a string,
although that will deprive it of any pretty-printing.
## Error handling
In case there is an error while your structure is being inspected,
Elixir will raise an `ArgumentError` error and will automatically fall back
to a raw representation for printing the structure.
You can however access the underlying error by invoking the Inspect
implementation directly. For example, to test Inspect.MapSet above,
you can invoke it as:
Inspect.MapSet.inspect(MapSet.new(), %Inspect.Opts{})
"""
# Handle structs in Any
@fallback_to_any true
def inspect(term, opts)
end
defimpl Inspect, for: Atom do
require Macro
def inspect(atom, opts) do
color(Identifier.inspect_as_atom(atom), color_key(atom), opts)
end
defp color_key(atom) when is_boolean(atom), do: :boolean
defp color_key(nil), do: nil
defp color_key(_), do: :atom
end
defimpl Inspect, for: BitString do
def inspect(term, opts) when is_binary(term) do
%Inspect.Opts{binaries: bins, base: base, printable_limit: printable_limit} = opts
if base == :decimal and
(bins == :as_strings or (bins == :infer and String.printable?(term, printable_limit))) do
inspected =
case Identifier.escape(term, ?", printable_limit) do
{escaped, ""} -> [?", escaped, ?"]
{escaped, _} -> [?", escaped, ?", " <> ..."]
end
color(IO.iodata_to_binary(inspected), :string, opts)
else
inspect_bitstring(term, opts)
end
end
def inspect(term, opts) do
inspect_bitstring(term, opts)
end
defp inspect_bitstring("", opts) do
color("<<>>", :binary, opts)
end
defp inspect_bitstring(bitstring, opts) do
left = color("<<", :binary, opts)
right = color(">>", :binary, opts)
inner = each_bit(bitstring, opts.limit, opts)
concat(concat(left, nest(inner, :cursor)), right)
end
defp each_bit(_, 0, _) do
"..."
end
defp each_bit(<<>>, _counter, _opts) do
:doc_nil
end
defp each_bit(<<h::8>>, _counter, opts) do
Inspect.Integer.inspect(h, opts)
end
defp each_bit(<<h, t::bitstring>>, counter, opts) do
flex_glue(concat(Inspect.Integer.inspect(h, opts), ","),
each_bit(t, decrement(counter), opts))
end
defp each_bit(bitstring, _counter, opts) do
size = bit_size(bitstring)
<<h::size(size)>> = bitstring
Inspect.Integer.inspect(h, opts) <> "::size(" <> Integer.to_string(size) <> ")"
end
@compile {:inline, decrement: 1}
defp decrement(:infinity), do: :infinity
defp decrement(counter), do: counter - 1
end
defimpl Inspect, for: List do
def inspect([], opts) do
color("[]", :list, opts)
end
# TODO: Remove :char_list and :as_char_lists handling in 2.0
def inspect(term, opts) do
%Inspect.Opts{charlists: lists, char_lists: lists_deprecated, printable_limit: printable_limit} = opts
lists =
if lists == :infer and lists_deprecated != :infer do
case lists_deprecated do
:as_char_lists ->
IO.warn "the :char_lists inspect option and its :as_char_lists " <>
"value are deprecated, use the :charlists option and its " <>
":as_charlists value instead"
:as_charlists
_ ->
IO.warn "the :char_lists inspect option is deprecated, use :charlists instead"
lists_deprecated
end
else
lists
end
open = color("[", :list, opts)
sep = color(",", :list, opts)
close = color("]", :list, opts)
cond do
lists == :as_charlists or (lists == :infer and List.ascii_printable?(term, printable_limit)) ->
inspected =
case Identifier.escape(IO.chardata_to_string(term), ?', printable_limit) do
{escaped, ""} -> [?', escaped, ?']
{escaped, _} -> [?', escaped, ?', " ++ ..."]
end
IO.iodata_to_binary inspected
keyword?(term) ->
container_doc(open, term, close, opts, &keyword/2, separator: sep, break: :strict)
true ->
container_doc(open, term, close, opts, &to_doc/2, separator: sep)
end
end
@doc false
def keyword({key, value}, opts) do
key = color(Identifier.inspect_as_key(key), :atom, opts)
concat(key, to_doc(value, opts))
end
@doc false
def keyword?([{key, _value} | rest]) when is_atom(key) do
case Atom.to_charlist(key) do
'Elixir.' ++ _ -> false
_ -> keyword?(rest)
end
end
def keyword?([]), do: true
def keyword?(_other), do: false
end
defimpl Inspect, for: Tuple do
def inspect(tuple, opts) do
open = color("{", :tuple, opts)
sep = color(",", :tuple, opts)
close = color("}", :tuple, opts)
container_doc(open, Tuple.to_list(tuple), close, opts, &to_doc/2, separator: sep)
end
end
defimpl Inspect, for: Map do
def inspect(map, opts) do
inspect(map, "", opts)
end
def inspect(map, name, opts) do
map = :maps.to_list(map)
open = color("%" <> name <> "{", :map, opts)
sep = color(",", :map, opts)
close = color("}", :map, opts)
container_doc(open, map, close, opts, traverse_fun(map, opts), separator: sep, break: :strict)
end
defp traverse_fun(list, opts) do
if Inspect.List.keyword?(list) do
&Inspect.List.keyword/2
else
sep = color(" => ", :map, opts)
&to_map(&1, &2, sep)
end
end
defp to_map({key, value}, opts, sep) do
concat(
concat(to_doc(key, opts), sep),
to_doc(value, opts)
)
end
end
defimpl Inspect, for: Integer do
def inspect(term, %Inspect.Opts{base: base} = opts) do
inspected = Integer.to_string(term, base_to_value(base)) |> prepend_prefix(base)
color(inspected, :number, opts)
end
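# For example, inspect(255, base: :hex) renders "0xFF" and base: :binary
# renders "0b11111111".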
defp base_to_value(base) do
case base do
:binary -> 2
:decimal -> 10
:octal -> 8
:hex -> 16
end
end
defp prepend_prefix(value, :decimal), do: value
defp prepend_prefix(value, base) do
prefix = case base do
:binary -> "0b"
:octal -> "0o"
:hex -> "0x"
end
prefix <> value
end
end
defimpl Inspect, for: Float do
def inspect(term, opts) do
inspected = IO.iodata_to_binary(:io_lib_format.fwrite_g(term))
color(inspected, :number, opts)
end
end
defimpl Inspect, for: Regex do
def inspect(regex, opts) do
{escaped, _} = Identifier.escape(regex.source, ?/, :infinity, &escape_map/1)
source = IO.iodata_to_binary(['~r/', escaped, ?/, regex.opts])
color(source, :regex, opts)
end
defp escape_map(?\a), do: '\\a'
defp escape_map(?\f), do: '\\f'
defp escape_map(?\n), do: '\\n'
defp escape_map(?\r), do: '\\r'
defp escape_map(?\t), do: '\\t'
defp escape_map(?\v), do: '\\v'
defp escape_map(_), do: false
end
defimpl Inspect, for: Function do
def inspect(function, _opts) do
fun_info = :erlang.fun_info(function)
mod = fun_info[:module]
name = fun_info[:name]
if fun_info[:type] == :external and fun_info[:env] == [] do
"&#{Identifier.inspect_as_atom(mod)}.#{Identifier.inspect_as_function(name)}/#{fun_info[:arity]}"
else
case Atom.to_charlist(mod) do
'elixir_compiler_' ++ _ ->
if function_exported?(mod, :__RELATIVE__, 0) do
"#Function<#{uniq(fun_info)} in file:#{mod.__RELATIVE__}>"
else
default_inspect(mod, fun_info)
end
_ ->
default_inspect(mod, fun_info)
end
end
end
defp default_inspect(mod, fun_info) do
"#Function<#{uniq(fun_info)}/#{fun_info[:arity]} in " <>
"#{Identifier.inspect_as_atom(mod)}#{extract_name(fun_info[:name])}>"
end
defp extract_name([]) do
""
end
defp extract_name(name) do
case Identifier.extract_anonymous_fun_parent(name) do
{name, arity} ->
"." <> Identifier.inspect_as_function(name) <> "/" <> arity
:error ->
"." <> Identifier.inspect_as_function(name)
end
end
defp uniq(fun_info) do
Integer.to_string(fun_info[:new_index]) <> "." <>
Integer.to_string(fun_info[:uniq])
end
end
defimpl Inspect, for: PID do
def inspect(pid, _opts) do
"#PID" <> IO.iodata_to_binary(:erlang.pid_to_list(pid))
end
end
defimpl Inspect, for: Port do
def inspect(port, _opts) do
IO.iodata_to_binary(:erlang.port_to_list(port))
end
end
defimpl Inspect, for: Reference do
def inspect(ref, _opts) do
'#Ref' ++ rest = :erlang.ref_to_list(ref)
"#Reference" <> IO.iodata_to_binary(rest)
end
end
defimpl Inspect, for: Any do
def inspect(%module{} = struct, opts) do
try do
module.__struct__
rescue
_ -> Inspect.Map.inspect(struct, opts)
else
dunder ->
if :maps.keys(dunder) == :maps.keys(struct) do
pruned = :maps.remove(:__exception__, :maps.remove(:__struct__, struct))
colorless_opts = %{opts | syntax_colors: []}
Inspect.Map.inspect(pruned, Inspect.Atom.inspect(module, colorless_opts), opts)
else
Inspect.Map.inspect(struct, opts)
end
end
end
end
|
lib/elixir/lib/inspect.ex
| 0.747339
| 0.659172
|
inspect.ex
|
starcoder
|
defmodule ExAdmin.ErrorsHelper do
@moduledoc """
The primary purpose of this module is to take nested changeset errors created
by many_to_many and has many through relationships and change them into a format
that the forms can use to get the error message from the field name.
Changes sets such as:
#Ecto.Changeset<action: nil,
changes: %{phone_numbers: [#Ecto.Changeset<action: :update, changes: %{},
errors: [], data: #ContactDemo.PhoneNumber<>, valid?: true>,
#Ecto.Changeset<action: :update, changes: %{},
errors: [number: {"can't be blank", []}], data: #ContactDemo.PhoneNumber<>,
valid?: false>,
#Ecto.Changeset<action: :insert, changes: %{label: "Primary Phone"},
errors: [number: {"can't be blank", []}], data: #ContactDemo.PhoneNumber<>,
valid?: false>]},
errors: [], data: #ContactDemo.Contact<>, valid?: false>
need to be walked and each of the error messages needs to be flattened into its
appropriately namespaced version.
To do this we need both the changeset and the schema used to generate the changeset.
This is required because we need to look at the schema to properly create the necessary
form field names. For example, many_to_many associations have attributes appended to the
field name so that we know it is a many to many field.
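For instance, an error on the `number` field of the second entry in a
`phone_numbers` has_many association is flattened to the key
`:phone_numbers_attributes_1_number` (indexes are zero-based).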
"""
def create_errors(changeset, schema) do
assoc_prefixes = create_prefix_map(schema)
flatten_errors(changeset, assoc_prefixes)
|> List.flatten
|> Enum.filter(fn(x) -> x != nil end)
end
defp flatten_errors(errors_array, assoc_prefixes, prefix \\ nil)
defp flatten_errors(%Ecto.Changeset{changes: changes, errors: errors}, assoc_prefixes, prefix) when errors == [] or is_nil(prefix) do
errors ++ flatten_errors(changes, assoc_prefixes, prefix)
end
defp flatten_errors(%Ecto.Changeset{changes: changes, errors: errors}, assoc_prefixes, prefix) do
Enum.map(errors, fn({k, v}) -> {concat_atoms(prefix, k), v} end) ++
flatten_errors(changes, assoc_prefixes, prefix)
end
defp flatten_errors(errors_array, assoc_prefixes, prefix) when is_list(errors_array) do
Enum.with_index(errors_array)
|> Enum.map(fn({x, i}) ->
prefix = concat_atoms(prefix, String.to_atom(Integer.to_string(i)))
flatten_errors(x, assoc_prefixes, prefix)
end)
end
defp flatten_errors(%{__struct__: _struct}, _, _), do: nil
defp flatten_errors(%{} = errors_map, assoc_prefixes, prefix) do
Enum.map(errors_map, fn({k, x}) ->
with k <- if(not is_atom(k), do: String.to_atom(k), else: k),
k <- if(Keyword.has_key?(assoc_prefixes, k), do: concat_atoms(k, assoc_prefixes[k]), else: k),
k <- if(prefix != nil, do: concat_atoms(prefix, k), else: k),
do: flatten_errors(x, assoc_prefixes, k)
end)
end
defp flatten_errors(_, _, _), do: nil
defp concat_atoms(first, second) do
"#{first}_#{second}" |> String.to_atom
end
defp create_prefix_map(schema) do
schema.__schema__(:associations)
|> Enum.map(&(schema.__schema__(:association, &1)))
|> Enum.map(fn(a) ->
case a do
%Ecto.Association.HasThrough{field: field} ->
{ field, :attributes }
%Ecto.Association.Has{field: field} ->
{ field, :attributes }
%Ecto.Association.ManyToMany{field: field} ->
{ field, :attributes }
_ ->
nil
end
end)
end
end
|
web/ex_admin/errors_helper.ex
| 0.649023
| 0.456834
|
errors_helper.ex
|
starcoder
|
defmodule Surface.Compiler do
@moduledoc """
Defines a behaviour that must be implemented by all HTML/Surface node translators.
This module also contains the main logic to translate Surface code.
"""
alias Surface.Compiler.Parser
alias Surface.IOHelper
alias Surface.AST
alias Surface.Compiler.Helpers
@stateful_component_types [
Surface.LiveComponent
]
@tag_directive_handlers [
Surface.Directive.TagAttrs,
Surface.Directive.Events,
Surface.Directive.Show,
Surface.Directive.If,
Surface.Directive.For,
Surface.Directive.Debug,
Surface.Directive.Hook
]
@component_directive_handlers [
Surface.Directive.Let,
Surface.Directive.ComponentProps,
Surface.Directive.If,
Surface.Directive.For,
Surface.Directive.Debug
]
@meta_component_directive_handlers [
Surface.Directive.If,
Surface.Directive.For,
Surface.Directive.Debug
]
@template_directive_handlers [Surface.Directive.Let]
@slot_directive_handlers [
Surface.Directive.SlotProps,
Surface.Directive.If,
Surface.Directive.For
]
@void_elements [
"area",
"base",
"br",
"col",
"command",
"embed",
"hr",
"img",
"input",
"keygen",
"link",
"meta",
"param",
"source",
"track",
"wbr"
]
defmodule ParseError do
defexception file: "", line: 0, message: "error parsing HTML/Surface"
@impl true
def message(exception) do
"#{Path.relative_to_cwd(exception.file)}:#{exception.line}: #{exception.message}"
end
end
defmodule CompileMeta do
defstruct [:line_offset, :file, :caller, :checks]
@type t :: %__MODULE__{
line_offset: non_neg_integer(),
file: binary(),
caller: Macro.Env.t(),
checks: Keyword.t(boolean())
}
end
@doc """
This function compiles a string into the Surface AST. This is used by ~H and Surface.Renderer to parse and compile templates.
A special note for line_offset: This is considered the line number for the first line in the string. If the first line of the
string is also the first line of the file, then this should be 1. If this is being called within a macro (say to process a heredoc
passed to ~H), this should be __CALLER__.line + 1.
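An illustrative call from within such a macro:
    Surface.Compiler.compile(string, __CALLER__.line + 1, __CALLER__, __CALLER__.file)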
"""
@spec compile(binary, non_neg_integer(), Macro.Env.t(), binary(), Keyword.t()) :: [
Surface.AST.t()
]
def compile(string, line_offset, caller, file \\ "nofile", opts \\ []) do
compile_meta = %CompileMeta{
line_offset: line_offset,
file: file,
caller: caller,
checks: opts[:checks] || []
}
string
|> Parser.parse()
|> case do
{:ok, nodes} ->
nodes
{:error, message, line} ->
raise %ParseError{line: line + line_offset - 1, file: file, message: message}
end
|> to_ast(compile_meta)
|> validate_component_structure(compile_meta, caller.module)
end
def to_live_struct(nodes, opts \\ []) do
Surface.Compiler.EExEngine.translate(nodes, opts)
end
def validate_component_structure(ast, meta, module) do
if is_stateful_component(module) do
validate_stateful_component(ast, meta)
end
ast
end
defp is_stateful_component(module) do
if Module.open?(module) do
Module.get_attribute(module, :component_type, Surface.BaseComponent) in @stateful_component_types
else
function_exported?(module, :component_type, 0) and
module.component_type() in @stateful_component_types
end
end
defp validate_stateful_component(ast, %CompileMeta{
line_offset: offset,
caller: %{function: {:render, _}} = caller
}) do
num_tags =
ast
|> Enum.filter(fn
%AST.Tag{} -> true
%AST.VoidTag{} -> true
%AST.Component{} -> true
_ -> false
end)
|> Enum.count()
cond do
num_tags == 0 ->
IOHelper.warn(
"stateful live components must have a HTML root element",
caller,
fn _ -> offset end
)
num_tags > 1 ->
IOHelper.warn(
"stateful live components must have a single HTML root element",
caller,
fn _ -> offset end
)
true ->
:noop
end
end
defp validate_stateful_component(_ast, %CompileMeta{}), do: nil
defp to_ast(nodes, compile_meta) do
for node <- nodes do
case convert_node_to_ast(node_type(node), node, compile_meta) do
{:ok, ast} ->
process_directives(ast)
{:error, {message, line}, meta} ->
IOHelper.warn(message, compile_meta.caller, fn _ -> line end)
%AST.Error{message: message, meta: meta}
{:error, {message, details, line}, meta} ->
details = if details, do: "\n\n" <> details, else: ""
IOHelper.warn(message <> details, compile_meta.caller, fn _ -> line end)
%AST.Error{message: message, meta: meta}
end
end
end
defp node_type({"#" <> _, _, _, _}), do: :macro_component
defp node_type({<<first, _::binary>>, _, _, _}) when first in ?A..?Z, do: :component
defp node_type({"template", _, _, _}), do: :template
defp node_type({"slot", _, _, _}), do: :slot
defp node_type({name, _, _, _}) when name in @void_elements, do: :void_tag
defp node_type({_, _, _, _}), do: :tag
defp node_type({:interpolation, _, _}), do: :interpolation
defp node_type(_), do: :text
defp process_directives(%{directives: directives} = node) do
directives
|> Enum.filter(fn %AST.Directive{module: mod} -> function_exported?(mod, :process, 2) end)
|> Enum.reduce(node, fn %AST.Directive{module: mod} = directive, node ->
mod.process(directive, node)
end)
end
defp process_directives(node), do: node
defp convert_node_to_ast(:text, text, _),
do: {:ok, %AST.Literal{value: text}}
defp convert_node_to_ast(:interpolation, {_, text, node_meta}, compile_meta) do
meta = Helpers.to_meta(node_meta, compile_meta)
expr = Helpers.interpolation_to_quoted!(text, meta)
Helpers.perform_assigns_checks(expr, compile_meta)
{:ok,
%AST.Interpolation{
original: text,
value: expr,
meta: meta
}}
end
defp convert_node_to_ast(:template, {_, attributes, children, node_meta}, compile_meta) do
meta = Helpers.to_meta(node_meta, compile_meta)
with {:ok, directives, attributes} <-
collect_directives(@template_directive_handlers, attributes, meta),
slot <- attribute_value(attributes, "slot", :default) do
{:ok,
%AST.Template{
name: slot,
children: to_ast(children, compile_meta),
directives: directives,
let: [],
meta: meta
}}
else
_ -> {:error, {"failed to parse template", meta.line}, meta}
end
end
defp convert_node_to_ast(:slot, {_, attributes, children, node_meta}, compile_meta) do
meta = Helpers.to_meta(node_meta, compile_meta)
defined_slot_names =
meta.caller.module
|> Surface.API.get_slots()
|> Enum.map(& &1.name)
# TODO: Validate attributes with custom messages
name = attribute_value(attributes, "name", :default)
index =
attribute_value_as_ast(attributes, "index", %Surface.AST.Literal{value: 0}, compile_meta)
with {:ok, directives, _attrs} <-
collect_directives(@slot_directive_handlers, attributes, meta),
true <- name in defined_slot_names do
{:ok,
%AST.Slot{
name: name,
index: index,
directives: directives,
default: to_ast(children, compile_meta),
props: [],
meta: meta
}}
else
_ ->
short_slot_syntax? = not has_attribute?(attributes, "name")
raise_missing_slot_error!(
meta.caller.module,
name,
meta,
defined_slot_names,
short_slot_syntax?
)
end
end
defp convert_node_to_ast(:tag, {name, attributes, children, node_meta}, compile_meta) do
meta = Helpers.to_meta(node_meta, compile_meta)
with {:ok, directives, attributes} <-
collect_directives(@tag_directive_handlers, attributes, meta),
attributes <- process_attributes(nil, attributes, meta),
children <- to_ast(children, compile_meta),
:ok <- validate_tag_children(children) do
{:ok,
%AST.Tag{
element: name,
attributes: attributes,
directives: directives,
children: children,
meta: meta
}}
else
      {:error, message} ->
        {:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}
_ ->
{:error, {"cannot render <#{name}>", meta.line}, meta}
end
end
defp convert_node_to_ast(:void_tag, {name, attributes, children, node_meta}, compile_meta) do
meta = Helpers.to_meta(node_meta, compile_meta)
with {:ok, directives, attributes} <-
collect_directives(@tag_directive_handlers, attributes, meta),
attributes <- process_attributes(nil, attributes, meta),
# a void element containing content is an error
[] <- to_ast(children, compile_meta) do
{:ok,
%AST.VoidTag{
element: name,
attributes: attributes,
directives: directives,
meta: meta
}}
else
      {:error, message} ->
        {:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}
_ ->
{:error, {"cannot render <#{name}>", meta.line}, meta}
end
end
defp convert_node_to_ast(:component, {name, attributes, children, node_meta}, compile_meta) do
# TODO: validate live views vs live components ?
meta = Helpers.to_meta(node_meta, compile_meta)
mod = Helpers.actual_component_module!(name, meta.caller)
meta = Map.merge(meta, %{module: mod, node_alias: name})
with :ok <- Helpers.validate_component_module(mod, name),
true <- function_exported?(mod, :component_type, 0),
component_type <- mod.component_type(),
         # This is a little bit hacky: :let will only be extracted for the default
         # template if `mod` doesn't export __slot_name__ (i.e. if it isn't a
         # slotable component). We pass in and modify the attributes so that
         # non-slotable components are not processed by the :let directive.
{:ok, templates, attributes} <-
collect_templates(mod, attributes, children, meta),
:ok <- validate_templates(mod, templates, meta),
{:ok, directives, attributes} <-
collect_directives(@component_directive_handlers, attributes, meta),
attributes <- process_attributes(mod, attributes, meta),
:ok <- validate_properties(mod, attributes, directives, meta) do
result =
if component_slotable?(mod) do
%AST.SlotableComponent{
module: mod,
slot: mod.__slot_name__(),
type: component_type,
let: [],
props: attributes,
directives: directives,
templates: templates,
meta: meta
}
else
%AST.Component{
module: mod,
type: component_type,
props: attributes,
directives: directives,
templates: templates,
meta: meta
}
end
{:ok, maybe_call_transform(result)}
else
{:error, message, details} ->
{:error, {"cannot render <#{name}> (#{message})", details, meta.line}, meta}
{:error, message} ->
{:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}
_ ->
{:error, {"cannot render <#{name}>", meta.line}, meta}
end
end
defp convert_node_to_ast(
:macro_component,
{"#" <> name, attributes, children, node_meta},
compile_meta
) do
meta = Helpers.to_meta(node_meta, compile_meta)
mod = Helpers.actual_component_module!(name, meta.caller)
meta = Map.merge(meta, %{module: mod, node_alias: name})
with :ok <- Helpers.validate_component_module(mod, name),
meta <- Map.merge(meta, %{module: mod, node_alias: name}),
true <- function_exported?(mod, :expand, 3),
{:ok, directives, attributes} <-
collect_directives(@meta_component_directive_handlers, attributes, meta),
attributes <- process_attributes(mod, attributes, meta),
:ok <- validate_properties(mod, attributes, directives, meta) do
expanded = mod.expand(attributes, children, meta)
{:ok,
%AST.Container{
children: List.wrap(expanded),
directives: directives,
meta: meta
}}
else
false ->
{:error,
{"cannot render <#{name}> (MacroComponents must export an expand/3 function)",
meta.line}, meta}
{:error, message, details} ->
{:error, {"cannot render <#{name}> (#{message})", details, meta.line}, meta}
{:error, message} ->
{:error, {"cannot render <#{name}> (#{message})", meta.line}, meta}
_ ->
{:error, {"cannot render <#{name}>", meta.line}, meta}
end
end
defp maybe_call_transform(%{module: module} = node) do
if function_exported?(module, :transform, 1) do
module.transform(node)
else
node
end
end
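  # e.g. (illustrative) attribute_value([{"name", "header", meta}], "name", :default)
  # returns :header; when the attribute is absent, the default is returned as-is.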
defp attribute_value(attributes, attr_name, default) do
Enum.find_value(attributes, default, fn {name, value, _} ->
if name == attr_name do
String.to_atom(value)
end
end)
end
defp has_attribute?([], _), do: false
defp has_attribute?(attributes, attr_name),
do: Enum.any?(attributes, &match?({^attr_name, _, _}, &1))
defp attribute_value_as_ast(attributes, attr_name, default, meta) do
Enum.find_value(attributes, default, fn
{^attr_name, {:attribute_expr, value, expr_meta}, _attr_meta} ->
expr_meta = Helpers.to_meta(expr_meta, meta)
%AST.AttributeExpr{
original: value,
value: Surface.TypeHandler.expr_to_quoted!(value, attr_name, :integer, expr_meta),
meta: expr_meta
}
{^attr_name, value, attr_meta} ->
attr_meta = Helpers.to_meta(attr_meta, meta)
Surface.TypeHandler.literal_to_ast_node!(:integer, attr_name, value, attr_meta)
_ ->
nil
end)
end
defp component_slotable?(mod), do: function_exported?(mod, :__slot_name__, 0)
defp process_attributes(_module, [], _meta), do: []
defp process_attributes(mod, [{name, value, attr_meta} | attrs], meta) do
name = String.to_atom(name)
attr_meta = Helpers.to_meta(attr_meta, meta)
{type, type_opts} = Surface.TypeHandler.attribute_type_and_opts(mod, name, attr_meta)
node = %AST.Attribute{
type: type,
type_opts: type_opts,
name: name,
value: attr_value(name, type, value, attr_meta),
meta: attr_meta
}
[node | process_attributes(mod, attrs, meta)]
end
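  # A value arriving as a list mixes literal and expression chunks, e.g.
  # class="btn {{ @kind }}" (illustrative); the chunks are rebuilt as a single
  # binary expression along the lines of <<"btn ", to_string(@kind)::binary>>.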
defp attr_value(name, type, values, attr_meta) when is_list(values) do
{originals, quoted_values} =
Enum.reduce(values, {[], []}, fn
{:attribute_expr, value, expr_meta}, {originals, quoted_values} ->
expr_meta = Helpers.to_meta(expr_meta, attr_meta)
{["{{#{value}}}" | originals], [quote_embedded_expr(value, expr_meta) | quoted_values]}
value, {originals, quoted_values} ->
{[value | originals], [value | quoted_values]}
end)
original = originals |> Enum.reverse() |> Enum.join()
quoted_values = Enum.reverse(quoted_values)
expr_value = {:<<>>, [line: attr_meta.line], quoted_values}
%AST.AttributeExpr{
original: original,
value: Surface.TypeHandler.expr_to_quoted!(expr_value, name, type, attr_meta, original),
meta: attr_meta
}
end
defp attr_value(name, type, {:attribute_expr, value, expr_meta}, attr_meta) do
expr_meta = Helpers.to_meta(expr_meta, attr_meta)
%AST.AttributeExpr{
original: value,
value: Surface.TypeHandler.expr_to_quoted!(value, name, type, expr_meta),
meta: expr_meta
}
end
defp attr_value(name, type, value, meta) do
Surface.TypeHandler.literal_to_ast_node!(type, name, value, meta)
end
defp quote_embedded_expr(value, expr_meta) do
meta = [line: expr_meta.line]
quoted_value = Code.string_to_quoted!(value, meta)
{:"::", meta,
[
{{:., meta, [Kernel, :to_string]}, meta, [quoted_value]},
{:binary, meta, Elixir}
]}
end
defp validate_tag_children([]), do: :ok
defp validate_tag_children([%AST.Template{name: name} | _]) do
    {:error,
     "templates are only allowed as child elements of components, but found template for #{name}"}
  end
defp validate_tag_children([_ | nodes]), do: validate_tag_children(nodes)
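  # Loose (non-template) children of a component, e.g. plain text inside
  # <Card>...</Card> (illustrative), are wrapped into a synthetic default
  # template so slot content can be handled uniformly.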
defp collect_templates(mod, attributes, nodes, meta) do
# Don't extract the template directives if this module is slotable
{:ok, directives, attributes} =
if component_slotable?(mod) do
{:ok, [], attributes}
else
collect_directives(@template_directive_handlers, attributes, meta)
end
templates =
nodes
|> to_ast(meta)
|> Enum.group_by(fn
%AST.Template{name: name} -> name
%AST.SlotableComponent{slot: name} -> name
_ -> :default
end)
{already_wrapped, default_children} =
templates
|> Map.get(:default, [])
|> Enum.split_with(fn
%AST.Template{} -> true
_ -> false
end)
if Enum.all?(default_children, &Helpers.is_blank_or_empty/1) do
{:ok, Map.put(templates, :default, already_wrapped), attributes}
else
wrapped =
process_directives(%AST.Template{
name: :default,
children: default_children,
directives: directives,
let: [],
meta: meta
})
{:ok, Map.put(templates, :default, [wrapped | already_wrapped]), attributes}
end
end
defp collect_directives(handlers, attributes, meta)
defp collect_directives(_, [], _), do: {:ok, [], []}
defp collect_directives(handlers, [attr | attributes], meta) do
{:ok, dirs, attrs} = collect_directives(handlers, attributes, meta)
attr = extract_modifiers(attr)
directives =
handlers
|> Enum.map(fn handler -> handler.extract(attr, meta) end)
|> List.flatten()
attributes =
if Enum.empty?(directives) do
[attr | attrs]
else
attrs
end
directives =
Enum.sort_by(directives ++ dirs, fn %{module: mod} ->
Enum.find_index(handlers, fn handler -> handler == mod end)
end)
{:ok, directives, attributes}
end
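  # Splits a directive name from its dot-separated modifiers, e.g. (illustrative)
  # {":keydown.enter", expr, meta} becomes {":keydown", expr, meta} with
  # meta.modifiers set to ["enter"].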
defp extract_modifiers({":" <> _ = attr_name, value, meta}) do
{name, modifiers} =
case String.split(attr_name, ".") do
[name] ->
{name, Map.get(meta, :modifiers, [])}
[name | modifiers] ->
{name, modifiers}
end
{name, value, Map.put(meta, :modifiers, modifiers)}
end
defp extract_modifiers(attr) do
attr
end
defp validate_properties(module, props, directives, meta) do
has_directive_props? = Enum.any?(directives, &match?(%AST.Directive{name: :props}, &1))
if not has_directive_props? and function_exported?(module, :__props__, 0) do
existing_props = Enum.map(props, fn %{name: name} -> name end)
required_props =
for p <- module.__props__(), Keyword.get(p.opts, :required, false), do: p.name
missing_props = required_props -- existing_props
for prop <- missing_props do
message = "Missing required property \"#{prop}\" for component <#{meta.node_alias}>"
IOHelper.warn(message, meta.caller, fn _ -> meta.line end)
end
end
:ok
end
defp validate_templates(mod, templates, meta) do
names = Map.keys(templates)
if !function_exported?(mod, :__slots__, 0) and not Enum.empty?(names) do
message = """
parent component `#{inspect(mod)}` does not define any slots. \
Found the following templates: #{inspect(names)}
"""
IOHelper.compile_error(message, meta.file, meta.line)
end
for name <- mod.__required_slots_names__(),
!Map.has_key?(templates, name) or
Enum.all?(Map.get(templates, name, []), &Helpers.is_blank_or_empty/1) do
message = "missing required slot \"#{name}\" for component <#{meta.node_alias}>"
IOHelper.warn(message, meta.caller, fn _ -> meta.line end)
end
for {slot_name, template_instances} <- templates,
mod.__get_slot__(slot_name) == nil,
not component_slotable?(mod),
template <- template_instances do
raise_missing_parent_slot_error!(mod, slot_name, template.meta, meta)
end
for slot_name <- Map.keys(templates),
template <- Map.get(templates, slot_name) do
slot = mod.__get_slot__(slot_name)
props = Keyword.keys(template.let)
prop_meta =
Enum.find_value(template.directives, meta, fn directive ->
if directive.module == Surface.Directive.Let do
directive.meta
end
end)
case slot do
%{opts: opts} ->
non_generator_args = Enum.map(opts[:props] || [], &Map.get(&1, :name))
undefined_keys = props -- non_generator_args
if not Enum.empty?(undefined_keys) do
[prop | _] = undefined_keys
message = """
undefined prop `#{inspect(prop)}` for slot `#{slot_name}` in `#{inspect(mod)}`.
Available props: #{inspect(non_generator_args)}.
Hint: You can define a new slot prop using the `props` option: \
`slot #{slot_name}, props: [..., #{inspect(prop)}]`\
"""
IOHelper.compile_error(message, prop_meta.file, prop_meta.line)
end
_ ->
:ok
end
end
:ok
end
defp raise_missing_slot_error!(
module,
slot_name,
meta,
_defined_slot_names,
true = _short_syntax?
) do
message = """
no slot `#{slot_name}` defined in the component `#{inspect(module)}`
Please declare the default slot using `slot default` in order to use the `<slot />` notation.
"""
IOHelper.compile_error(message, meta.file, meta.line)
end
defp raise_missing_slot_error!(
module,
slot_name,
meta,
defined_slot_names,
false = _short_syntax?
) do
similar_slot_message = similar_slot_message(slot_name, defined_slot_names)
existing_slots_message = existing_slots_message(defined_slot_names)
message = """
no slot `#{slot_name}` defined in the component `#{inspect(module)}`\
#{similar_slot_message}\
    #{existing_slots_message}

    Hint: You can define slots using the `slot` macro. \
    For instance: `slot #{slot_name}`\
"""
IOHelper.compile_error(message, meta.file, meta.line)
end
defp raise_missing_parent_slot_error!(mod, slot_name, template_meta, parent_meta) do
parent_slots = mod.__slots__() |> Enum.map(& &1.name)
similar_slot_message = similar_slot_message(slot_name, parent_slots)
existing_slots_message = existing_slots_message(parent_slots)
header_message =
if component_slotable?(template_meta.module) do
"""
        The slotable component <#{inspect(template_meta.module)}> has the `:slot` option set to \
`#{slot_name}`.
That slot name is not declared in parent component <#{parent_meta.node_alias}>.
Please declare the slot in the parent component or rename the value in the `:slot` option.\
"""
else
"""
no slot "#{slot_name}" defined in parent component <#{parent_meta.node_alias}>\
"""
end
message = """
#{header_message}\
#{similar_slot_message}\
#{existing_slots_message}
"""
IOHelper.compile_error(message, template_meta.file, template_meta.line)
end
defp similar_slot_message(slot_name, list_of_slot_names, opts \\ []) do
threshold = opts[:threshold] || 0.8
case Helpers.did_you_mean(slot_name, list_of_slot_names) do
{similar, score} when score > threshold ->
"\n\nDid you mean #{inspect(to_string(similar))}?"
_ ->
""
end
end
defp existing_slots_message([]), do: ""
defp existing_slots_message(existing_slots) do
slots = Enum.map(existing_slots, &to_string/1)
available = Helpers.list_to_string("slot:", "slots:", slots)
"\n\nAvailable #{available}"
end
end
|
lib/surface/compiler.ex
| 0.777088
| 0.453564
|
compiler.ex
|
starcoder
|
defmodule MarsRoverKata.Instruction do
@moduledoc """
Apply instructions to robot position.
"""
alias MarsRoverKata.Planet
alias MarsRoverKata.Point
alias MarsRoverKata.Position
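  # Example (illustrative; assumes a planet large enough that no wrapping occurs):
  #
  #   position = %Position{point: Point.new(0, 0), direction: :N}
  #   perform_next(planet, position, :F)
  #   #=> %Position{point: Point.new(0, 1), direction: :N}
  #
  # :F/:B translate along the facing axis (wrapped onto the planet by
  # Planet.to_absolute_position/2); :L/:R only rotate in place.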
@spec perform_next(Planet.t(), Position.t(), atom) :: Position.t()
def perform_next(
%Planet{} = planet,
%Position{} = position,
instruction
)
when instruction in [:F, :B] do
position
|> perform(instruction)
|> Planet.to_absolute_position(planet)
end
def perform_next(
%Planet{} = _planet,
%Position{} = position,
instruction
)
when instruction in [:L, :R] do
perform(position, instruction)
end
defp perform(%Position{point: point, direction: :N = direction}, :F),
do: %Position{point: Point.new(point.x, point.y + 1), direction: direction}
defp perform(%Position{point: point, direction: :N = direction}, :B),
do: %Position{point: Point.new(point.x, point.y - 1), direction: direction}
defp perform(%Position{point: point, direction: :S = direction}, :F),
do: %Position{point: Point.new(point.x, point.y - 1), direction: direction}
defp perform(%Position{point: point, direction: :S = direction}, :B),
do: %Position{point: Point.new(point.x, point.y + 1), direction: direction}
defp perform(%Position{point: point, direction: :W = direction}, :F),
do: %Position{point: Point.new(point.x - 1, point.y), direction: direction}
defp perform(%Position{point: point, direction: :W = direction}, :B),
do: %Position{point: Point.new(point.x + 1, point.y), direction: direction}
defp perform(%Position{point: point, direction: :E = direction}, :F),
do: %Position{point: Point.new(point.x + 1, point.y), direction: direction}
defp perform(%Position{point: point, direction: :E = direction}, :B),
do: %Position{point: Point.new(point.x - 1, point.y), direction: direction}
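  # Rotations step through the compass ring: :L goes N -> W -> S -> E -> N
  # (counter-clockwise) and :R goes N -> E -> S -> W -> N (clockwise).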
defp perform(%Position{point: point, direction: :N}, :L),
do: %Position{point: point, direction: :W}
defp perform(%Position{point: point, direction: :N}, :R),
do: %Position{point: point, direction: :E}
defp perform(%Position{point: point, direction: :S}, :L),
do: %Position{point: point, direction: :E}
defp perform(%Position{point: point, direction: :S}, :R),
do: %Position{point: point, direction: :W}
defp perform(%Position{point: point, direction: :W}, :L),
do: %Position{point: point, direction: :S}
defp perform(%Position{point: point, direction: :W}, :R),
do: %Position{point: point, direction: :N}
defp perform(%Position{point: point, direction: :E}, :L),
do: %Position{point: point, direction: :N}
defp perform(%Position{point: point, direction: :E}, :R),
do: %Position{point: point, direction: :S}
end
|
lib/mars_rover_kata/instruction.ex
| 0.903084
| 0.730242
|
instruction.ex
|
starcoder
|
defmodule Tournament do
@header "Team | MP | W | D | L | P"
@doc """
Given `input` lines representing two teams and whether the first of them won,
lost, or reached a draw, separated by semicolons, calculate the statistics
for each team's number of games played, won, drawn, lost, and total points
for the season, and return a nicely-formatted string table.
A win earns a team 3 points, a draw earns 1 point, and a loss earns nothing.
Order the outcome by most total points for the season, and settle ties by
listing the teams in alphabetical order.
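
  For example (illustrative):

      tally(["Allegoric Alaskans;Blithering Badgers;win"])

  returns the header row followed by "Allegoric Alaskans" (1 match, 1 win,
  3 points) above "Blithering Badgers" (1 match, 1 loss, 0 points).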
"""
@spec tally(input :: list(String.t())) :: String.t()
def tally(input) do
Enum.reduce(input, %{}, &tally_acc/2)
|> Enum.map(&summarize/1)
    |> Enum.sort_by(fn {team, v} -> {-v.points, team} end)
|> Enum.map(&format/1)
|> List.insert_at(0, @header)
|> Enum.join("\n")
end
@doc """
Increase the tally for a given team's metric by one. If no metrics have been
recorded yet for that team or that metric, it will be set to 1.
"""
@spec bump(records :: map(), team :: String.t(), metric :: atom()) :: map()
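  # e.g. bump(%{}, "Owls", :wins) #=> %{"Owls" => %{wins: 1}}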
defp bump(records, team, metric) do
update_in(records, [Access.key(team, %{}), Access.key(metric, 0)], &(&1 + 1))
end
defp tally_acc(e, acc) do
case String.split(e, ";") do
[team_1, team_2, "win"] ->
acc
|> bump(team_1, :wins)
|> bump(team_2, :losses)
[team_1, team_2, "loss"] ->
acc
|> bump(team_1, :losses)
|> bump(team_2, :wins)
[team_1, team_2, "draw"] ->
acc
|> bump(team_1, :draws)
|> bump(team_2, :draws)
_ -> acc
end
end
@doc """
Given a map of wins, losses, and draws for a given team, calculate the total
number of matches participated in and the overall points the team has from
their wins and draws.
"""
@spec summarize(entry :: {String.t(), map()}) :: {String.t(), map()}
defp summarize({team, results}) do
{
team,
results
|> Map.put(:matches, Map.values(results) |> Enum.sum)
|> Map.put(:points, Map.get(results, :wins, 0) * 3 + Map.get(results, :draws, 0))
}
end
defp format({team, results}) do
Enum.join(
[
String.pad_trailing(team, 30),
results.matches,
Map.get(results, :wins, 0),
Map.get(results, :draws, 0),
Map.get(results, :losses, 0),
results.points
],
" | ")
end
end
|
exercism/tournament/tournament.ex
| 0.87444
| 0.542924
|
tournament.ex
|
starcoder
|
defmodule IdenticonGenerator.Identicon do
@doc """
Hashes a string input and returns the hash within the Image struct as hex
## Examples
iex> hash = IdenticonGenerator.Identicon.hash_input("banana")
iex> Map.get(hash, :hex)
[114, 179, 2, 191, 41, 122, 34, 138, 117, 115, 1, 35, 239, 239, 124, 65]
"""
def hash_input(input) do
hex = :crypto.hash(:md5, input)
|> :binary.bin_to_list()
%IdenticonGenerator.Image{hex: hex}
end
@doc """
Picks a color based on the first three numbers of list as RGB values
## Examples
iex> color = IdenticonGenerator.Identicon.pick_color(%IdenticonGenerator.Image{hex: [200, 201, 202, 100, 123, 123, 255]})
iex> Map.get(color, :color)
{200, 201, 202}
"""
def pick_color(%IdenticonGenerator.Image{hex: [r, g, b | _tail ]} = image) do
%IdenticonGenerator.Image{image | color: {r, g, b}}
end
@doc """
  Takes an Image struct and returns an updated struct whose grid is the hex list chunked into groups of three, mirrored, and paired with each item's list index
## Examples
iex> grid = IdenticonGenerator.Identicon.build_grid(%IdenticonGenerator.Image{hex: [200, 201, 202, 100, 123, 124, 255]})
iex> Map.get(grid, :grid)
[{200, 0}, {201, 1}, {202, 2}, {201, 3}, {200, 4}, {100, 5}, {123, 6}, {124, 7}, {123, 8}, {100, 9}]
"""
def build_grid(%IdenticonGenerator.Image{hex: hex} = image) do
grid =
hex
|> Enum.chunk_every(3, 3, :discard)
|> Enum.map(&mirror_row/1)
|> List.flatten
|> Enum.with_index
%IdenticonGenerator.Image{ image | grid: grid }
end
@doc """
Takes a list and appends the second and first elements to the end of the list
## Examples
iex> IdenticonGenerator.Identicon.mirror_row([200, 201, 202])
[200, 201, 202, 201, 200]
"""
def mirror_row(row) do
[ r, g | _tail ] = row
row ++ [g, r]
end
@doc """
Filters only even tuple values of the grid provided
## Examples
iex> grid = IdenticonGenerator.Identicon.filter_even_squares(%IdenticonGenerator.Image{grid: [{200, 0}, {201, 1}, {202, 2}, {201, 3}, {200, 4}]})
iex> Map.get(grid, :grid)
[{200, 0}, {202, 2}, {200, 4}]
"""
def filter_even_squares(%IdenticonGenerator.Image{grid: grid} = image) do
grid = Enum.filter grid, fn({code, _index}) ->
rem(code, 2) == 0
end
%IdenticonGenerator.Image{ image | grid: grid }
end
@doc """
Builds a pixel map of squares to be colored
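
  Each grid index maps to a 50x50 pixel square on a 5-column grid.

  ## Examples

      iex> map = IdenticonGenerator.Identicon.build_pixel_map(%IdenticonGenerator.Image{grid: [{200, 7}]})
      iex> Map.get(map, :pixel_map)
      [{{100, 50}, {150, 100}}]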
"""
def build_pixel_map(%IdenticonGenerator.Image{grid: grid} = image) do
pixel_map = Enum.map grid, fn({ _code, index }) ->
horizontal = rem(index, 5) * 50
vertical = div(index, 5) * 50
top_left = { horizontal, vertical }
bottom_right = { horizontal + 50, vertical + 50 }
{top_left, bottom_right}
end
%IdenticonGenerator.Image{ image | pixel_map: pixel_map }
end
@doc """
Builds an image binary based on the Image struct pixel_map and selected color
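
  The result is the binary produced by `:egd.render/1` (PNG by default), so it
  can be written directly to disk, e.g. `File.write!("identicon.png", binary)`
  (illustrative usage).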
"""
def draw_image(%IdenticonGenerator.Image{ color: color, pixel_map: pixel_map }) do
image = :egd.create(250, 250)
fill = :egd.color(color)
Enum.each pixel_map, fn({ start, stop }) ->
:egd.filledRectangle(image, start, stop, fill)
end
:egd.render(image)
end
@doc """
Generates Identicon
"""
def main(input) do
input
|> hash_input
|> pick_color
|> build_grid
|> filter_even_squares
|> build_pixel_map
|> draw_image
end
end
|
lib/identicon_generator/identicon.ex
| 0.90495
| 0.401512
|
identicon.ex
|
starcoder
|